Przeglądaj źródła

tests: switch to 4 space indent

Reformat this dir by itself to help resolve merge conflicts with other CLs.

Reformatted using:
parallel ./yapf -i -- tests/*.py
~/chromiumos/chromite/contrib/reflow_overlong_comments tests/*.py

These files still had lines (strings) that were too long, so the pylint
warnings were suppressed with a TODO.
tests/bot_update_coverage_test.py
tests/cipd_bootstrap_test.py
tests/gclient_eval_unittest.py
tests/gclient_git_smoketest.py
tests/gclient_scm_test.py
tests/gclient_smoketest.py
tests/gclient_test.py
tests/gclient_transitions_smoketest.py
tests/gclient_utils_test.py
tests/git_cl_test.py
tests/git_hyper_blame_test.py
tests/git_rebase_update_test.py
tests/lockfile_test.py
tests/metrics_test.py
tests/presubmit_canned_checks_test.py
tests/presubmit_unittest.py
tests/roll_dep_test.py

Change-Id: I8fed04b4ba81d54b8f45da612213aad27a9e1a2c
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/4842592
Commit-Queue: Josip Sokcevic <sokcevic@chromium.org>
Reviewed-by: Josip Sokcevic <sokcevic@chromium.org>
Auto-Submit: Mike Frysinger <vapier@chromium.org>
Mike Frysinger 1 rok temu
rodzic
commit
677616322a
56 zmienionych plików z 19216 dodań i 18905 usunięć
  1. 3 0
      tests/.style.yapf
  2. 163 146
      tests/auth_test.py
  3. 56 54
      tests/autoninja_test.py
  4. 64 64
      tests/bazel_test.py
  5. 242 237
      tests/bot_update_coverage_test.py
  6. 57 53
      tests/cipd_bootstrap_test.py
  7. 34 34
      tests/detect_host_arch_test.py
  8. 451 445
      tests/download_from_google_storage_unittest.py
  9. 230 224
      tests/fetch_test.py
  10. 39 42
      tests/fix_encoding_test.py
  11. 173 158
      tests/gclient_cipd_smoketest.py
  12. 1261 1105
      tests/gclient_eval_unittest.py
  13. 136 125
      tests/gclient_git_mutates_smoketest.py
  14. 1473 1418
      tests/gclient_git_smoketest.py
  15. 247 253
      tests/gclient_no_sync_smoketest.py
  16. 173 176
      tests/gclient_paths_test.py
  17. 1446 1403
      tests/gclient_scm_test.py
  18. 160 144
      tests/gclient_smoketest.py
  19. 126 118
      tests/gclient_smoketest_base.py
  20. 469 479
      tests/gclient_test.py
  21. 234 200
      tests/gclient_transitions_smoketest.py
  22. 367 337
      tests/gclient_utils_test.py
  23. 140 138
      tests/gerrit_client_test.py
  24. 515 461
      tests/gerrit_util_test.py
  25. 256 251
      tests/git_cache_test.py
  26. 4217 4627
      tests/git_cl_test.py
  27. 500 492
      tests/git_common_test.py
  28. 65 48
      tests/git_dates_test.py
  29. 34 33
      tests/git_find_releases_test.py
  30. 247 243
      tests/git_footers_test.py
  31. 647 544
      tests/git_hyper_blame_test.py
  32. 78 75
      tests/git_map_test.py
  33. 77 76
      tests/git_migrate_default_branch_test.py
  34. 52 54
      tests/git_number_test.py
  35. 324 312
      tests/git_rebase_update_test.py
  36. 113 110
      tests/gsutil_test.py
  37. 80 76
      tests/lockfile_test.py
  38. 791 752
      tests/metrics_test.py
  39. 70 70
      tests/my_activity_test.py
  40. 271 258
      tests/ninja_reclient_test.py
  41. 160 144
      tests/ninjalog_uploader_test.py
  42. 224 226
      tests/owners_client_test.py
  43. 223 207
      tests/owners_finder_test.py
  44. 404 389
      tests/presubmit_canned_checks_test.py
  45. 536 563
      tests/presubmit_unittest.py
  46. 105 102
      tests/rdb_wrapper_test.py
  47. 4 4
      tests/recipes_test.py
  48. 236 205
      tests/reclient_metrics_test.py
  49. 180 172
      tests/roll_dep_test.py
  50. 261 252
      tests/scm_unittest.py
  51. 175 166
      tests/split_cl_test.py
  52. 131 137
      tests/subcommand_test.py
  53. 243 237
      tests/subprocess2_test.py
  54. 151 162
      tests/upload_to_google_storage_unittest.py
  55. 28 28
      tests/utils_test.py
  56. 74 76
      tests/watchlists_unittest.py

+ 3 - 0
tests/.style.yapf

@@ -0,0 +1,3 @@
+[style]
+based_on_style = pep8
+column_limit = 80

+ 163 - 146
tests/auth_test.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2017 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit Tests for auth.py"""
 
 import calendar
@@ -18,167 +17,185 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 import auth
 import subprocess2
 
-
 NOW = datetime.datetime(2019, 10, 17, 12, 30, 59, 0)
 VALID_EXPIRY = NOW + datetime.timedelta(seconds=31)
 
 
 class AuthenticatorTest(unittest.TestCase):
-  def setUp(self):
-    mock.patch('subprocess2.check_call').start()
-    mock.patch('subprocess2.check_call_out').start()
-    mock.patch('auth.datetime_now', return_value=NOW).start()
-    self.addCleanup(mock.patch.stopall)
-
-  def testHasCachedCredentials_NotLoggedIn(self):
-    subprocess2.check_call_out.side_effect = [
-        subprocess2.CalledProcessError(1, ['cmd'], 'cwd', 'stdout', 'stderr')]
-    self.assertFalse(auth.Authenticator().has_cached_credentials())
-
-  def testHasCachedCredentials_LoggedIn(self):
-    subprocess2.check_call_out.return_value = (
-        json.dumps({'token': 'token', 'expiry': 12345678}), '')
-    self.assertTrue(auth.Authenticator().has_cached_credentials())
-
-  def testGetAccessToken_NotLoggedIn(self):
-    subprocess2.check_call_out.side_effect = [
-        subprocess2.CalledProcessError(1, ['cmd'], 'cwd', 'stdout', 'stderr')]
-    self.assertRaises(
-        auth.LoginRequiredError, auth.Authenticator().get_access_token)
-
-  def testGetAccessToken_CachedToken(self):
-    authenticator = auth.Authenticator()
-    authenticator._access_token = auth.AccessToken('token', None)
-    self.assertEqual(
-        auth.AccessToken('token', None), authenticator.get_access_token())
-    subprocess2.check_call_out.assert_not_called()
-
-  def testGetAccesstoken_LoggedIn(self):
-    expiry = calendar.timegm(VALID_EXPIRY.timetuple())
-    subprocess2.check_call_out.return_value = (
-        json.dumps({'token': 'token', 'expiry': expiry}), '')
-    self.assertEqual(
-        auth.AccessToken('token', VALID_EXPIRY),
-        auth.Authenticator().get_access_token())
-    subprocess2.check_call_out.assert_called_with(
-        ['luci-auth',
-         'token',
-         '-scopes', auth.OAUTH_SCOPE_EMAIL,
-         '-json-output', '-'],
-        stdout=subprocess2.PIPE, stderr=subprocess2.PIPE)
-
-  def testGetAccessToken_DifferentScope(self):
-    expiry = calendar.timegm(VALID_EXPIRY.timetuple())
-    subprocess2.check_call_out.return_value = (
-        json.dumps({'token': 'token', 'expiry': expiry}), '')
-    self.assertEqual(
-        auth.AccessToken('token', VALID_EXPIRY),
-        auth.Authenticator('custom scopes').get_access_token())
-    subprocess2.check_call_out.assert_called_with(
-        ['luci-auth', 'token', '-scopes', 'custom scopes', '-json-output', '-'],
-        stdout=subprocess2.PIPE, stderr=subprocess2.PIPE)
-
-  def testAuthorize(self):
-    http = mock.Mock()
-    http_request = http.request
-    http_request.__name__ = '__name__'
-
-    authenticator = auth.Authenticator()
-    authenticator._access_token = auth.AccessToken('token', None)
-
-    authorized = authenticator.authorize(http)
-    authorized.request(
-        'https://example.com', method='POST', body='body',
-        headers={'header': 'value'})
-    http_request.assert_called_once_with(
-        'https://example.com', 'POST', 'body',
-        {'header': 'value', 'Authorization': 'Bearer token'}, mock.ANY,
-        mock.ANY)
+    def setUp(self):
+        mock.patch('subprocess2.check_call').start()
+        mock.patch('subprocess2.check_call_out').start()
+        mock.patch('auth.datetime_now', return_value=NOW).start()
+        self.addCleanup(mock.patch.stopall)
+
+    def testHasCachedCredentials_NotLoggedIn(self):
+        subprocess2.check_call_out.side_effect = [
+            subprocess2.CalledProcessError(1, ['cmd'], 'cwd', 'stdout',
+                                           'stderr')
+        ]
+        self.assertFalse(auth.Authenticator().has_cached_credentials())
+
+    def testHasCachedCredentials_LoggedIn(self):
+        subprocess2.check_call_out.return_value = (json.dumps({
+            'token': 'token',
+            'expiry': 12345678
+        }), '')
+        self.assertTrue(auth.Authenticator().has_cached_credentials())
+
+    def testGetAccessToken_NotLoggedIn(self):
+        subprocess2.check_call_out.side_effect = [
+            subprocess2.CalledProcessError(1, ['cmd'], 'cwd', 'stdout',
+                                           'stderr')
+        ]
+        self.assertRaises(auth.LoginRequiredError,
+                          auth.Authenticator().get_access_token)
+
+    def testGetAccessToken_CachedToken(self):
+        authenticator = auth.Authenticator()
+        authenticator._access_token = auth.AccessToken('token', None)
+        self.assertEqual(auth.AccessToken('token', None),
+                         authenticator.get_access_token())
+        subprocess2.check_call_out.assert_not_called()
+
+    def testGetAccesstoken_LoggedIn(self):
+        expiry = calendar.timegm(VALID_EXPIRY.timetuple())
+        subprocess2.check_call_out.return_value = (json.dumps({
+            'token': 'token',
+            'expiry': expiry
+        }), '')
+        self.assertEqual(auth.AccessToken('token', VALID_EXPIRY),
+                         auth.Authenticator().get_access_token())
+        subprocess2.check_call_out.assert_called_with([
+            'luci-auth', 'token', '-scopes', auth.OAUTH_SCOPE_EMAIL,
+            '-json-output', '-'
+        ],
+                                                      stdout=subprocess2.PIPE,
+                                                      stderr=subprocess2.PIPE)
+
+    def testGetAccessToken_DifferentScope(self):
+        expiry = calendar.timegm(VALID_EXPIRY.timetuple())
+        subprocess2.check_call_out.return_value = (json.dumps({
+            'token': 'token',
+            'expiry': expiry
+        }), '')
+        self.assertEqual(auth.AccessToken('token', VALID_EXPIRY),
+                         auth.Authenticator('custom scopes').get_access_token())
+        subprocess2.check_call_out.assert_called_with([
+            'luci-auth', 'token', '-scopes', 'custom scopes', '-json-output',
+            '-'
+        ],
+                                                      stdout=subprocess2.PIPE,
+                                                      stderr=subprocess2.PIPE)
+
+    def testAuthorize(self):
+        http = mock.Mock()
+        http_request = http.request
+        http_request.__name__ = '__name__'
+
+        authenticator = auth.Authenticator()
+        authenticator._access_token = auth.AccessToken('token', None)
+
+        authorized = authenticator.authorize(http)
+        authorized.request('https://example.com',
+                           method='POST',
+                           body='body',
+                           headers={'header': 'value'})
+        http_request.assert_called_once_with('https://example.com', 'POST',
+                                             'body', {
+                                                 'header': 'value',
+                                                 'Authorization': 'Bearer token'
+                                             }, mock.ANY, mock.ANY)
 
 
 class AccessTokenTest(unittest.TestCase):
-  def setUp(self):
-    mock.patch('auth.datetime_now', return_value=NOW).start()
-    self.addCleanup(mock.patch.stopall)
+    def setUp(self):
+        mock.patch('auth.datetime_now', return_value=NOW).start()
+        self.addCleanup(mock.patch.stopall)
 
-  def testNeedsRefresh_NoExpiry(self):
-    self.assertFalse(auth.AccessToken('token', None).needs_refresh())
+    def testNeedsRefresh_NoExpiry(self):
+        self.assertFalse(auth.AccessToken('token', None).needs_refresh())
 
-  def testNeedsRefresh_Expired(self):
-    expired = NOW + datetime.timedelta(seconds=30)
-    self.assertTrue(auth.AccessToken('token', expired).needs_refresh())
+    def testNeedsRefresh_Expired(self):
+        expired = NOW + datetime.timedelta(seconds=30)
+        self.assertTrue(auth.AccessToken('token', expired).needs_refresh())
 
-  def testNeedsRefresh_Valid(self):
-    self.assertFalse(auth.AccessToken('token', VALID_EXPIRY).needs_refresh())
+    def testNeedsRefresh_Valid(self):
+        self.assertFalse(
+            auth.AccessToken('token', VALID_EXPIRY).needs_refresh())
 
 
 class HasLuciContextLocalAuthTest(unittest.TestCase):
-  def setUp(self):
-    mock.patch('os.environ').start()
-    mock.patch('builtins.open', mock.mock_open()).start()
-    self.addCleanup(mock.patch.stopall)
-
-  def testNoLuciContextEnvVar(self):
-    os.environ = {}
-    self.assertFalse(auth.has_luci_context_local_auth())
-
-  def testNonexistentPath(self):
-    os.environ = {'LUCI_CONTEXT': 'path'}
-    open.side_effect = OSError
-    self.assertFalse(auth.has_luci_context_local_auth())
-    open.assert_called_with('path')
-
-  def testInvalidJsonFile(self):
-    os.environ = {'LUCI_CONTEXT': 'path'}
-    open().read.return_value = 'not-a-json-file'
-    self.assertFalse(auth.has_luci_context_local_auth())
-    open.assert_called_with('path')
-
-  def testNoLocalAuth(self):
-    os.environ = {'LUCI_CONTEXT': 'path'}
-    open().read.return_value = '{}'
-    self.assertFalse(auth.has_luci_context_local_auth())
-    open.assert_called_with('path')
-
-  def testNoDefaultAccountId(self):
-    os.environ = {'LUCI_CONTEXT': 'path'}
-    open().read.return_value = json.dumps({
-        'local_auth': {
-            'secret': 'secret',
-            'accounts': [{
-                'email': 'bots@account.iam.gserviceaccount.com',
-                'id': 'system',
-            }],
-            'rpc_port': 1234,
-        }
-    })
-    self.assertFalse(auth.has_luci_context_local_auth())
-    open.assert_called_with('path')
-
-  def testHasLocalAuth(self):
-    os.environ = {'LUCI_CONTEXT': 'path'}
-    open().read.return_value = json.dumps({
-        'local_auth': {
-            'secret': 'secret',
-            'accounts': [
-                {
+    def setUp(self):
+        mock.patch('os.environ').start()
+        mock.patch('builtins.open', mock.mock_open()).start()
+        self.addCleanup(mock.patch.stopall)
+
+    def testNoLuciContextEnvVar(self):
+        os.environ = {}
+        self.assertFalse(auth.has_luci_context_local_auth())
+
+    def testNonexistentPath(self):
+        os.environ = {'LUCI_CONTEXT': 'path'}
+        open.side_effect = OSError
+        self.assertFalse(auth.has_luci_context_local_auth())
+        open.assert_called_with('path')
+
+    def testInvalidJsonFile(self):
+        os.environ = {'LUCI_CONTEXT': 'path'}
+        open().read.return_value = 'not-a-json-file'
+        self.assertFalse(auth.has_luci_context_local_auth())
+        open.assert_called_with('path')
+
+    def testNoLocalAuth(self):
+        os.environ = {'LUCI_CONTEXT': 'path'}
+        open().read.return_value = '{}'
+        self.assertFalse(auth.has_luci_context_local_auth())
+        open.assert_called_with('path')
+
+    def testNoDefaultAccountId(self):
+        os.environ = {'LUCI_CONTEXT': 'path'}
+        open().read.return_value = json.dumps({
+            'local_auth': {
+                'secret':
+                'secret',
+                'accounts': [{
                     'email': 'bots@account.iam.gserviceaccount.com',
                     'id': 'system',
-                },
-                {
-                    'email': 'builder@account.iam.gserviceaccount.com',
-                    'id': 'task',
-                },
-            ],
-            'rpc_port': 1234,
-            'default_account_id': 'task',
-        },
-    })
-    self.assertTrue(auth.has_luci_context_local_auth())
-    open.assert_called_with('path')
+                }],
+                'rpc_port':
+                1234,
+            }
+        })
+        self.assertFalse(auth.has_luci_context_local_auth())
+        open.assert_called_with('path')
+
+    def testHasLocalAuth(self):
+        os.environ = {'LUCI_CONTEXT': 'path'}
+        open().read.return_value = json.dumps({
+            'local_auth': {
+                'secret':
+                'secret',
+                'accounts': [
+                    {
+                        'email': 'bots@account.iam.gserviceaccount.com',
+                        'id': 'system',
+                    },
+                    {
+                        'email': 'builder@account.iam.gserviceaccount.com',
+                        'id': 'task',
+                    },
+                ],
+                'rpc_port':
+                1234,
+                'default_account_id':
+                'task',
+            },
+        })
+        self.assertTrue(auth.has_luci_context_local_auth())
+        open.assert_called_with('path')
 
 
 if __name__ == '__main__':
-  if '-v' in sys.argv:
-    logging.basicConfig(level=logging.DEBUG)
-  unittest.main()
+    if '-v' in sys.argv:
+        logging.basicConfig(level=logging.DEBUG)
+    unittest.main()

+ 56 - 54
tests/autoninja_test.py

@@ -18,63 +18,65 @@ from testing_support import trial_dir
 
 
 def write(filename, content):
-  """Writes the content of a file and create the directories as needed."""
-  filename = os.path.abspath(filename)
-  dirname = os.path.dirname(filename)
-  if not os.path.isdir(dirname):
-    os.makedirs(dirname)
-  with open(filename, 'w') as f:
-    f.write(content)
+    """Writes the content of a file and create the directories as needed."""
+    filename = os.path.abspath(filename)
+    dirname = os.path.dirname(filename)
+    if not os.path.isdir(dirname):
+        os.makedirs(dirname)
+    with open(filename, 'w') as f:
+        f.write(content)
 
 
 class AutoninjaTest(trial_dir.TestCase):
-  def setUp(self):
-    super(AutoninjaTest, self).setUp()
-    self.previous_dir = os.getcwd()
-    os.chdir(self.root_dir)
-
-  def tearDown(self):
-    os.chdir(self.previous_dir)
-    super(AutoninjaTest, self).tearDown()
-
-  def test_autoninja(self):
-    autoninja.main([])
-
-  def test_autoninja_goma(self):
-    with unittest.mock.patch(
-        'subprocess.call',
-        return_value=0) as mock_call, unittest.mock.patch.dict(
-            os.environ, {"GOMA_DIR": os.path.join(self.root_dir, 'goma_dir')}):
-      out_dir = os.path.join('out', 'dir')
-      write(os.path.join(out_dir, 'args.gn'), 'use_goma=true')
-      write(
-          os.path.join(
-              'goma_dir',
-              'gomacc.exe' if sys.platform.startswith('win') else 'gomacc'),
-          'content')
-      args = autoninja.main(['autoninja.py', '-C', out_dir]).split()
-      mock_call.assert_called_once()
-
-    self.assertIn('-j', args)
-    parallel_j = int(args[args.index('-j') + 1])
-    self.assertGreater(parallel_j, multiprocessing.cpu_count())
-    self.assertIn(os.path.join(autoninja.SCRIPT_DIR, 'ninja.py'), args)
-
-  def test_autoninja_reclient(self):
-    out_dir = os.path.join('out', 'dir')
-    write(os.path.join(out_dir, 'args.gn'), 'use_remoteexec=true')
-    write('.gclient', '')
-    write('.gclient_entries', 'entries = {"buildtools": "..."}')
-    write(os.path.join('buildtools', 'reclient_cfgs', 'reproxy.cfg'), 'RBE_v=2')
-    write(os.path.join('buildtools', 'reclient', 'version.txt'), '0.0')
-
-    args = autoninja.main(['autoninja.py', '-C', out_dir]).split()
-
-    self.assertIn('-j', args)
-    parallel_j = int(args[args.index('-j') + 1])
-    self.assertGreater(parallel_j, multiprocessing.cpu_count())
-    self.assertIn(os.path.join(autoninja.SCRIPT_DIR, 'ninja_reclient.py'), args)
+    def setUp(self):
+        super(AutoninjaTest, self).setUp()
+        self.previous_dir = os.getcwd()
+        os.chdir(self.root_dir)
+
+    def tearDown(self):
+        os.chdir(self.previous_dir)
+        super(AutoninjaTest, self).tearDown()
+
+    def test_autoninja(self):
+        autoninja.main([])
+
+    def test_autoninja_goma(self):
+        with unittest.mock.patch(
+                'subprocess.call',
+                return_value=0) as mock_call, unittest.mock.patch.dict(
+                    os.environ,
+                    {"GOMA_DIR": os.path.join(self.root_dir, 'goma_dir')}):
+            out_dir = os.path.join('out', 'dir')
+            write(os.path.join(out_dir, 'args.gn'), 'use_goma=true')
+            write(
+                os.path.join(
+                    'goma_dir', 'gomacc.exe'
+                    if sys.platform.startswith('win') else 'gomacc'), 'content')
+            args = autoninja.main(['autoninja.py', '-C', out_dir]).split()
+            mock_call.assert_called_once()
+
+        self.assertIn('-j', args)
+        parallel_j = int(args[args.index('-j') + 1])
+        self.assertGreater(parallel_j, multiprocessing.cpu_count())
+        self.assertIn(os.path.join(autoninja.SCRIPT_DIR, 'ninja.py'), args)
+
+    def test_autoninja_reclient(self):
+        out_dir = os.path.join('out', 'dir')
+        write(os.path.join(out_dir, 'args.gn'), 'use_remoteexec=true')
+        write('.gclient', '')
+        write('.gclient_entries', 'entries = {"buildtools": "..."}')
+        write(os.path.join('buildtools', 'reclient_cfgs', 'reproxy.cfg'),
+              'RBE_v=2')
+        write(os.path.join('buildtools', 'reclient', 'version.txt'), '0.0')
+
+        args = autoninja.main(['autoninja.py', '-C', out_dir]).split()
+
+        self.assertIn('-j', args)
+        parallel_j = int(args[args.index('-j') + 1])
+        self.assertGreater(parallel_j, multiprocessing.cpu_count())
+        self.assertIn(os.path.join(autoninja.SCRIPT_DIR, 'ninja_reclient.py'),
+                      args)
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 64 - 64
tests/bazel_test.py

@@ -22,73 +22,73 @@ from testing_support import trial_dir
 
 
 class FindCrosUnittest(trial_dir.TestCase):
-  """Test the _find_bazel_cros function."""
-  def setUp(self):
-    """Create the checkout and chromite files."""
-    super().setUp()
-    self.checkout_dir = Path(self.root_dir) / "chromiumos"
-    self.chromite_dir = self.checkout_dir / "chromite"
-    self.launcher = self.chromite_dir / "bin" / "bazel"
-    self.launcher.parent.mkdir(exist_ok=True, parents=True)
-    self.launcher.write_bytes(b"")
-    self.launcher.chmod(0o775)
-    self.orig_dir = Path.cwd()
-
-  def tearDown(self):
-    os.chdir(self.orig_dir)
-    super().tearDown()
-
-  def test_at_checkout_base(self):
-    """Test we find the launcher at the base of the checkout."""
-    os.chdir(self.checkout_dir)
-    self.assertEqual(bazel._find_bazel_cros(), self.launcher)
-
-  def test_in_checkout_subdir(self):
-    """Test we find the launcher in a subdir of the checkout."""
-    os.chdir(self.chromite_dir)
-    self.assertEqual(bazel._find_bazel_cros(), self.launcher)
-
-  def test_out_of_checkout(self):
-    """Test we don't find the launcher outside of the checkout."""
-    os.chdir(self.root_dir)
-    self.assertIsNone(bazel._find_bazel_cros())
+    """Test the _find_bazel_cros function."""
+    def setUp(self):
+        """Create the checkout and chromite files."""
+        super().setUp()
+        self.checkout_dir = Path(self.root_dir) / "chromiumos"
+        self.chromite_dir = self.checkout_dir / "chromite"
+        self.launcher = self.chromite_dir / "bin" / "bazel"
+        self.launcher.parent.mkdir(exist_ok=True, parents=True)
+        self.launcher.write_bytes(b"")
+        self.launcher.chmod(0o775)
+        self.orig_dir = Path.cwd()
+
+    def tearDown(self):
+        os.chdir(self.orig_dir)
+        super().tearDown()
+
+    def test_at_checkout_base(self):
+        """Test we find the launcher at the base of the checkout."""
+        os.chdir(self.checkout_dir)
+        self.assertEqual(bazel._find_bazel_cros(), self.launcher)
+
+    def test_in_checkout_subdir(self):
+        """Test we find the launcher in a subdir of the checkout."""
+        os.chdir(self.chromite_dir)
+        self.assertEqual(bazel._find_bazel_cros(), self.launcher)
+
+    def test_out_of_checkout(self):
+        """Test we don't find the launcher outside of the checkout."""
+        os.chdir(self.root_dir)
+        self.assertIsNone(bazel._find_bazel_cros())
 
 
 class FindPathUnittest(trial_dir.TestCase):
-  """Test the _find_next_bazel_in_path function."""
-  def setUp(self):
-    """Create the checkout and chromite files."""
-    super().setUp()
-
-    self.bin_dir = Path(self.root_dir) / "bin"
-    self.bin_dir.mkdir(exist_ok=True, parents=True)
-    self.orig_path = os.environ.get("PATH", os.defpath)
-
-    # DEPOT_TOOLS_DIR is located twice in PATH for spice.
-    os.environ["PATH"] = os.pathsep.join([
-        str(DEPOT_TOOLS_DIR),
-        str(self.bin_dir),
-        str(DEPOT_TOOLS_DIR),
-    ])
-
-  def tearDown(self):
-    """Restore actions from setUp()."""
-    os.environ["PATH"] = self.orig_path
-
-  def test_not_in_path(self):
-    """Test we don't find anything in PATH when not present."""
-    self.assertIsNone(bazel._find_next_bazel_in_path())
-
-  def test_in_path(self):
-    """Test we find the next Bazel in PATH when present."""
-    if sys.platform == "win32":
-      launcher = self.bin_dir / "bazel.exe"
-    else:
-      launcher = self.bin_dir / "bazel"
-    launcher.write_bytes(b"")
-    launcher.chmod(0o755)
-    self.assertEqual(bazel._find_next_bazel_in_path(), launcher)
+    """Test the _find_next_bazel_in_path function."""
+    def setUp(self):
+        """Create the checkout and chromite files."""
+        super().setUp()
+
+        self.bin_dir = Path(self.root_dir) / "bin"
+        self.bin_dir.mkdir(exist_ok=True, parents=True)
+        self.orig_path = os.environ.get("PATH", os.defpath)
+
+        # DEPOT_TOOLS_DIR is located twice in PATH for spice.
+        os.environ["PATH"] = os.pathsep.join([
+            str(DEPOT_TOOLS_DIR),
+            str(self.bin_dir),
+            str(DEPOT_TOOLS_DIR),
+        ])
+
+    def tearDown(self):
+        """Restore actions from setUp()."""
+        os.environ["PATH"] = self.orig_path
+
+    def test_not_in_path(self):
+        """Test we don't find anything in PATH when not present."""
+        self.assertIsNone(bazel._find_next_bazel_in_path())
+
+    def test_in_path(self):
+        """Test we find the next Bazel in PATH when present."""
+        if sys.platform == "win32":
+            launcher = self.bin_dir / "bazel.exe"
+        else:
+            launcher = self.bin_dir / "bazel"
+        launcher.write_bytes(b"")
+        launcher.chmod(0o755)
+        self.assertEqual(bazel._find_next_bazel_in_path(), launcher)
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 242 - 237
tests/bot_update_coverage_test.py

@@ -9,51 +9,55 @@ import os
 import sys
 import unittest
 
-sys.path.insert(0, os.path.join(
-    os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
-    'recipes', 'recipe_modules', 'bot_update', 'resources'))
+sys.path.insert(
+    0,
+    os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
+                 'recipes', 'recipe_modules', 'bot_update', 'resources'))
 import bot_update
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class MockedPopen(object):
-  """A fake instance of a called subprocess.
+    """A fake instance of a called subprocess.
 
   This is meant to be used in conjunction with MockedCall.
   """
-  def __init__(self, args=None, kwargs=None):
-    self.args = args or []
-    self.kwargs = kwargs or {}
-    self.return_value = None
-    self.fails = False
+    def __init__(self, args=None, kwargs=None):
+        self.args = args or []
+        self.kwargs = kwargs or {}
+        self.return_value = None
+        self.fails = False
 
-  def returns(self, rv):
-    """Set the return value when this popen is called.
+    def returns(self, rv):
+        """Set the return value when this popen is called.
 
     rv can be a string, or a callable (eg function).
     """
-    self.return_value = rv
-    return self
+        self.return_value = rv
+        return self
 
-  def check(self, args, kwargs):
-    """Check to see if the given args/kwargs call match this instance.
+    def check(self, args, kwargs):
+        """Check to see if the given args/kwargs call match this instance.
 
     This does a partial match, so that a call to "git clone foo" will match
     this instance if this instance was recorded as "git clone"
     """
-    if any(input_arg != expected_arg
-           for (input_arg, expected_arg) in zip(args, self.args)):
-      return False
-    return self.return_value
+        if any(input_arg != expected_arg
+               for (input_arg, expected_arg) in zip(args, self.args)):
+            return False
+        return self.return_value
 
-  def __call__(self, args, kwargs):
-    """Actually call this popen instance."""
-    if hasattr(self.return_value, '__call__'):
-      return self.return_value(*args, **kwargs)
-    return self.return_value
+    def __call__(self, args, kwargs):
+        """Actually call this popen instance."""
+        if hasattr(self.return_value, '__call__'):
+            return self.return_value(*args, **kwargs)
+        return self.return_value
 
 
 class MockedCall(object):
-  """A fake instance of bot_update.call().
+    """A fake instance of bot_update.call().
 
   This object is pre-seeded with "answers" in self.expectations.  The type
   is a MockedPopen object, or any object with a __call__() and check() method.
@@ -63,241 +67,242 @@ class MockedCall(object):
   By default, if no answers have been pre-seeded, the call() returns successful
   with an empty string.
   """
-  def __init__(self, fake_filesystem):
-    self.expectations = []
-    self.records = []
-
-  def expect(self, args=None, kwargs=None):
-    args = args or []
-    kwargs = kwargs or {}
-    popen = MockedPopen(args, kwargs)
-    self.expectations.append(popen)
-    return popen
-
-  def __call__(self, *args, **kwargs):
-    self.records.append((args, kwargs))
-    for popen in self.expectations:
-      if popen.check(args, kwargs):
-        self.expectations.remove(popen)
-        return popen(args, kwargs)
-    return ''
+    def __init__(self, fake_filesystem):
+        self.expectations = []
+        self.records = []
+
+    def expect(self, args=None, kwargs=None):
+        args = args or []
+        kwargs = kwargs or {}
+        popen = MockedPopen(args, kwargs)
+        self.expectations.append(popen)
+        return popen
+
+    def __call__(self, *args, **kwargs):
+        self.records.append((args, kwargs))
+        for popen in self.expectations:
+            if popen.check(args, kwargs):
+                self.expectations.remove(popen)
+                return popen(args, kwargs)
+        return ''
 
 
 class MockedGclientSync():
-  """A class producing a callable instance of gclient sync."""
-  def __init__(self, fake_filesystem):
-    self.records = []
+    """A class producing a callable instance of gclient sync."""
+    def __init__(self, fake_filesystem):
+        self.records = []
 
-  def __call__(self, *args, **_):
-    self.records.append(args)
+    def __call__(self, *args, **_):
+        self.records.append(args)
 
 
 class FakeFile():
-  def __init__(self):
-    self.contents = ''
+    def __init__(self):
+        self.contents = ''
 
-  def write(self, buf):
-    self.contents += buf
+    def write(self, buf):
+        self.contents += buf
 
-  def read(self):
-    return self.contents
+    def read(self):
+        return self.contents
 
-  def __enter__(self):
-    return self
+    def __enter__(self):
+        return self
 
-  def __exit__(self, _, __, ___):
-    pass
+    def __exit__(self, _, __, ___):
+        pass
 
 
 class FakeFilesystem():
-  def __init__(self):
-    self.files = {}
+    def __init__(self):
+        self.files = {}
 
-  def open(self, target, mode='r', encoding=None):
-    if 'w' in mode:
-      self.files[target] = FakeFile()
-      return self.files[target]
-    return self.files[target]
+    def open(self, target, mode='r', encoding=None):
+        if 'w' in mode:
+            self.files[target] = FakeFile()
+            return self.files[target]
+        return self.files[target]
 
 
 def fake_git(*args, **kwargs):
-  return bot_update.call('git', *args, **kwargs)
+    return bot_update.call('git', *args, **kwargs)
 
 
 class BotUpdateUnittests(unittest.TestCase):
-  DEFAULT_PARAMS = {
-      'solutions': [{
-          'name': 'somename',
-          'url': 'https://fake.com'
-      }],
-      'revisions': {},
-      'first_sln': 'somename',
-      'target_os': None,
-      'target_os_only': None,
-      'target_cpu': None,
-      'patch_root': None,
-      'patch_refs': [],
-      'gerrit_rebase_patch_ref': None,
-      'no_fetch_tags': False,
-      'refs': [],
-      'git_cache_dir': '',
-      'cleanup_dir': None,
-      'gerrit_reset': None,
-      'enforce_fetch': False,
-      'experiments': [],
-  }
-
-  def setUp(self):
-    sys.platform = 'linux2'  # For consistency, ya know?
-    self.filesystem = FakeFilesystem()
-    self.call = MockedCall(self.filesystem)
-    self.gclient = MockedGclientSync(self.filesystem)
-    self.call.expect(
-        (sys.executable, '-u', bot_update.GCLIENT_PATH, 'sync')
-    ).returns(self.gclient)
-    self.old_call = getattr(bot_update, 'call')
-    self.params = copy.deepcopy(self.DEFAULT_PARAMS)
-    setattr(bot_update, 'call', self.call)
-    setattr(bot_update, 'git', fake_git)
-
-    self.old_os_cwd = os.getcwd
-    setattr(os, 'getcwd', lambda: '/b/build/foo/build')
-
-    setattr(bot_update, 'open', self.filesystem.open)
-    self.old_codecs_open = codecs.open
-    setattr(codecs, 'open', self.filesystem.open)
-
-  def tearDown(self):
-    setattr(bot_update, 'call', self.old_call)
-    setattr(os, 'getcwd', self.old_os_cwd)
-    delattr(bot_update, 'open')
-    setattr(codecs, 'open', self.old_codecs_open)
-
-  def overrideSetupForWindows(self):
-    sys.platform = 'win'
-    self.call.expect(
-        (sys.executable, '-u', bot_update.GCLIENT_PATH, 'sync')
-    ).returns(self.gclient)
-
-  def testBasic(self):
-    bot_update.ensure_checkout(**self.params)
-    return self.call.records
-
-  def testBasicCachepackOffloading(self):
-    os.environ['PACKFILE_OFFLOADING'] = '1'
-    bot_update.ensure_checkout(**self.params)
-    os.environ.pop('PACKFILE_OFFLOADING')
-    return self.call.records
-
-  def testBasicRevision(self):
-    self.params['revisions'] = {
-        'src': 'HEAD', 'src/v8': 'deadbeef', 'somename': 'DNE'}
-    bot_update.ensure_checkout(**self.params)
-    args = self.gclient.records[0]
-    idx_first_revision = args.index('--revision')
-    idx_second_revision = args.index(
-        '--revision', idx_first_revision+1)
-    idx_third_revision = args.index('--revision', idx_second_revision+1)
-    self.assertEqual(args[idx_first_revision+1], 'somename@unmanaged')
-    self.assertEqual(
-        args[idx_second_revision+1], 'src@refs/remotes/origin/main')
-    self.assertEqual(args[idx_third_revision+1], 'src/v8@deadbeef')
-    return self.call.records
-
-  def testTagsByDefault(self):
-    bot_update.ensure_checkout(**self.params)
-    found = False
-    for record in self.call.records:
-      args = record[0]
-      if args[:3] == ('git', 'cache', 'populate'):
-        self.assertFalse('--no-fetch-tags' in args)
-        found = True
-    self.assertTrue(found)
-    return self.call.records
-
-  def testNoTags(self):
-    params = self.params
-    params['no_fetch_tags'] = True
-    bot_update.ensure_checkout(**params)
-    found = False
-    for record in self.call.records:
-      args = record[0]
-      if args[:3] == ('git', 'cache', 'populate'):
-        self.assertTrue('--no-fetch-tags' in args)
-        found = True
-    self.assertTrue(found)
-    return self.call.records
-
-  def testGclientNoSyncExperiment(self):
-    ref = 'refs/changes/12/345/6'
-    repo = 'https://chromium.googlesource.com/v8/v8'
-    self.params['patch_refs'] = ['%s@%s' % (repo, ref)]
-    self.params['experiments'] = bot_update.EXP_NO_SYNC
-    bot_update.ensure_checkout(**self.params)
-    args = self.gclient.records[0]
-    idx = args.index('--experiment')
-    self.assertEqual(args[idx+1], bot_update.EXP_NO_SYNC)
-
-  def testApplyPatchOnGclient(self):
-    ref = 'refs/changes/12/345/6'
-    repo = 'https://chromium.googlesource.com/v8/v8'
-    self.params['patch_refs'] = ['%s@%s' % (repo, ref)]
-    bot_update.ensure_checkout(**self.params)
-    args = self.gclient.records[0]
-    idx = args.index('--patch-ref')
-    self.assertEqual(args[idx+1], self.params['patch_refs'][0])
-    self.assertNotIn('--patch-ref', args[idx+1:])
-    # Assert we're not patching in bot_update.py
-    for record in self.call.records:
-      self.assertNotIn('git fetch ' + repo,
-                       ' '.join(record[0]))
-
-  def testPatchRefs(self):
-    self.params['patch_refs'] = [
-        'https://chromium.googlesource.com/chromium/src@refs/changes/12/345/6',
-        'https://chromium.googlesource.com/v8/v8@refs/changes/1/234/56']
-    bot_update.ensure_checkout(**self.params)
-    args = self.gclient.records[0]
-    patch_refs = set(
-        args[i+1] for i in range(len(args))
-        if args[i] == '--patch-ref' and i+1 < len(args))
-    self.assertIn(self.params['patch_refs'][0], patch_refs)
-    self.assertIn(self.params['patch_refs'][1], patch_refs)
-
-  def testGitCheckoutBreaksLocks(self):
-    self.overrideSetupForWindows()
-    path = '/b/build/foo/build/.git'
-    lockfile = 'index.lock'
-    removed = []
-    old_os_walk = os.walk
-    old_os_remove = os.remove
-    setattr(os, 'walk', lambda _: [(path, None, [lockfile])])
-    setattr(os, 'remove', removed.append)
-    bot_update.ensure_checkout(**self.params)
-    setattr(os, 'walk', old_os_walk)
-    setattr(os, 'remove', old_os_remove)
-    self.assertTrue(os.path.join(path, lockfile) in removed)
-
-  def testParsesRevisions(self):
-    revisions = [
-      'f671d3baeb64d9dba628ad582e867cf1aebc0207',
-      'src@deadbeef',
-      'https://foo.googlesource.com/bar@12345',
-      'bar@refs/experimental/test@example.com/test',
-    ]
-    expected_results = {
-      'root': 'f671d3baeb64d9dba628ad582e867cf1aebc0207',
-      'src': 'deadbeef',
-      'https://foo.googlesource.com/bar.git': '12345',
-      'bar': 'refs/experimental/test@example.com/test',
+    DEFAULT_PARAMS = {
+        'solutions': [{
+            'name': 'somename',
+            'url': 'https://fake.com'
+        }],
+        'revisions': {},
+        'first_sln': 'somename',
+        'target_os': None,
+        'target_os_only': None,
+        'target_cpu': None,
+        'patch_root': None,
+        'patch_refs': [],
+        'gerrit_rebase_patch_ref': None,
+        'no_fetch_tags': False,
+        'refs': [],
+        'git_cache_dir': '',
+        'cleanup_dir': None,
+        'gerrit_reset': None,
+        'enforce_fetch': False,
+        'experiments': [],
     }
-    actual_results = bot_update.parse_revisions(revisions, 'root')
-    self.assertEqual(expected_results, actual_results)
+
+    def setUp(self):
+        sys.platform = 'linux2'  # For consistency, ya know?
+        self.filesystem = FakeFilesystem()
+        self.call = MockedCall(self.filesystem)
+        self.gclient = MockedGclientSync(self.filesystem)
+        self.call.expect((sys.executable, '-u', bot_update.GCLIENT_PATH,
+                          'sync')).returns(self.gclient)
+        self.old_call = getattr(bot_update, 'call')
+        self.params = copy.deepcopy(self.DEFAULT_PARAMS)
+        setattr(bot_update, 'call', self.call)
+        setattr(bot_update, 'git', fake_git)
+
+        self.old_os_cwd = os.getcwd
+        setattr(os, 'getcwd', lambda: '/b/build/foo/build')
+
+        setattr(bot_update, 'open', self.filesystem.open)
+        self.old_codecs_open = codecs.open
+        setattr(codecs, 'open', self.filesystem.open)
+
+    def tearDown(self):
+        setattr(bot_update, 'call', self.old_call)
+        setattr(os, 'getcwd', self.old_os_cwd)
+        delattr(bot_update, 'open')
+        setattr(codecs, 'open', self.old_codecs_open)
+
+    def overrideSetupForWindows(self):
+        sys.platform = 'win'
+        self.call.expect((sys.executable, '-u', bot_update.GCLIENT_PATH,
+                          'sync')).returns(self.gclient)
+
+    def testBasic(self):
+        bot_update.ensure_checkout(**self.params)
+        return self.call.records
+
+    def testBasicCachepackOffloading(self):
+        os.environ['PACKFILE_OFFLOADING'] = '1'
+        bot_update.ensure_checkout(**self.params)
+        os.environ.pop('PACKFILE_OFFLOADING')
+        return self.call.records
+
+    def testBasicRevision(self):
+        self.params['revisions'] = {
+            'src': 'HEAD',
+            'src/v8': 'deadbeef',
+            'somename': 'DNE'
+        }
+        bot_update.ensure_checkout(**self.params)
+        args = self.gclient.records[0]
+        idx_first_revision = args.index('--revision')
+        idx_second_revision = args.index('--revision', idx_first_revision + 1)
+        idx_third_revision = args.index('--revision', idx_second_revision + 1)
+        self.assertEqual(args[idx_first_revision + 1], 'somename@unmanaged')
+        self.assertEqual(args[idx_second_revision + 1],
+                         'src@refs/remotes/origin/main')
+        self.assertEqual(args[idx_third_revision + 1], 'src/v8@deadbeef')
+        return self.call.records
+
+    def testTagsByDefault(self):
+        bot_update.ensure_checkout(**self.params)
+        found = False
+        for record in self.call.records:
+            args = record[0]
+            if args[:3] == ('git', 'cache', 'populate'):
+                self.assertFalse('--no-fetch-tags' in args)
+                found = True
+        self.assertTrue(found)
+        return self.call.records
+
+    def testNoTags(self):
+        params = self.params
+        params['no_fetch_tags'] = True
+        bot_update.ensure_checkout(**params)
+        found = False
+        for record in self.call.records:
+            args = record[0]
+            if args[:3] == ('git', 'cache', 'populate'):
+                self.assertTrue('--no-fetch-tags' in args)
+                found = True
+        self.assertTrue(found)
+        return self.call.records
+
+    def testGclientNoSyncExperiment(self):
+        ref = 'refs/changes/12/345/6'
+        repo = 'https://chromium.googlesource.com/v8/v8'
+        self.params['patch_refs'] = ['%s@%s' % (repo, ref)]
+        self.params['experiments'] = bot_update.EXP_NO_SYNC
+        bot_update.ensure_checkout(**self.params)
+        args = self.gclient.records[0]
+        idx = args.index('--experiment')
+        self.assertEqual(args[idx + 1], bot_update.EXP_NO_SYNC)
+
+    def testApplyPatchOnGclient(self):
+        ref = 'refs/changes/12/345/6'
+        repo = 'https://chromium.googlesource.com/v8/v8'
+        self.params['patch_refs'] = ['%s@%s' % (repo, ref)]
+        bot_update.ensure_checkout(**self.params)
+        args = self.gclient.records[0]
+        idx = args.index('--patch-ref')
+        self.assertEqual(args[idx + 1], self.params['patch_refs'][0])
+        self.assertNotIn('--patch-ref', args[idx + 1:])
+        # Assert we're not patching in bot_update.py
+        for record in self.call.records:
+            self.assertNotIn('git fetch ' + repo, ' '.join(record[0]))
+
+    def testPatchRefs(self):
+        self.params['patch_refs'] = [
+            'https://chromium.googlesource.com/chromium/src@refs/changes/12/345/6',
+            'https://chromium.googlesource.com/v8/v8@refs/changes/1/234/56'
+        ]
+        bot_update.ensure_checkout(**self.params)
+        args = self.gclient.records[0]
+        patch_refs = set(args[i + 1] for i in range(len(args))
+                         if args[i] == '--patch-ref' and i + 1 < len(args))
+        self.assertIn(self.params['patch_refs'][0], patch_refs)
+        self.assertIn(self.params['patch_refs'][1], patch_refs)
+
+    def testGitCheckoutBreaksLocks(self):
+        self.overrideSetupForWindows()
+        path = '/b/build/foo/build/.git'
+        lockfile = 'index.lock'
+        removed = []
+        old_os_walk = os.walk
+        old_os_remove = os.remove
+        setattr(os, 'walk', lambda _: [(path, None, [lockfile])])
+        setattr(os, 'remove', removed.append)
+        bot_update.ensure_checkout(**self.params)
+        setattr(os, 'walk', old_os_walk)
+        setattr(os, 'remove', old_os_remove)
+        self.assertTrue(os.path.join(path, lockfile) in removed)
+
+    def testParsesRevisions(self):
+        revisions = [
+            'f671d3baeb64d9dba628ad582e867cf1aebc0207',
+            'src@deadbeef',
+            'https://foo.googlesource.com/bar@12345',
+            'bar@refs/experimental/test@example.com/test',
+        ]
+        expected_results = {
+            'root': 'f671d3baeb64d9dba628ad582e867cf1aebc0207',
+            'src': 'deadbeef',
+            'https://foo.googlesource.com/bar.git': '12345',
+            'bar': 'refs/experimental/test@example.com/test',
+        }
+        actual_results = bot_update.parse_revisions(revisions, 'root')
+        self.assertEqual(expected_results, actual_results)
+
 
 class CallUnitTest(unittest.TestCase):
-  def testCall(self):
-    ret = bot_update.call(sys.executable, '-c', 'print(1)')
-    self.assertEqual(u'1\n', ret)
+    def testCall(self):
+        ret = bot_update.call(sys.executable, '-c', 'print(1)')
+        self.assertEqual(u'1\n', ret)
+
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 57 - 53
tests/cipd_bootstrap_test.py

@@ -10,8 +10,10 @@ import sys
 import unittest
 import tempfile
 
-ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
 
+ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 
 # CIPD client version to use for self-update from an "old" checkout to the tip.
 #
@@ -38,72 +40,74 @@ windows-amd64   sha256  3e21561b45acb2845c309a04cbedb2ce1e0567b7b24bf89857e76736
 
 
 class CipdBootstrapTest(unittest.TestCase):
-  """Tests that CIPD client can bootstrap from scratch and self-update from some
+    """Tests that CIPD client can bootstrap from scratch and self-update from some
   old version to a most recent one.
 
   WARNING: This integration test touches real network and real CIPD backend and
   downloads several megabytes of stuff.
   """
+    def setUp(self):
+        self.tempdir = tempfile.mkdtemp('depot_tools_cipd')
 
-  def setUp(self):
-    self.tempdir = tempfile.mkdtemp('depot_tools_cipd')
-
-  def tearDown(self):
-    shutil.rmtree(self.tempdir)
+    def tearDown(self):
+        shutil.rmtree(self.tempdir)
 
-  def stage_files(self, cipd_version=None, digests=None):
-    """Copies files needed for cipd bootstrap into the temp dir.
+    def stage_files(self, cipd_version=None, digests=None):
+        """Copies files needed for cipd bootstrap into the temp dir.
 
     Args:
       cipd_version: if not None, a value to put into cipd_client_version file.
     """
-    names = (
-      '.cipd_impl.ps1',
-      'cipd',
-      'cipd.bat',
-      'cipd_client_version',
-      'cipd_client_version.digests',
-    )
-    for f in names:
-      shutil.copy2(os.path.join(ROOT_DIR, f), os.path.join(self.tempdir, f))
-    if cipd_version is not None:
-      with open(os.path.join(self.tempdir, 'cipd_client_version'), 'wt') as f:
-        f.write(cipd_version+'\n')
-    if digests is not None:
-      p = os.path.join(self.tempdir, 'cipd_client_version.digests')
-      with open(p, 'wt') as f:
-        f.write(digests+'\n')
-
-  def call_cipd_help(self):
-    """Calls 'cipd help' bootstrapping the client in tempdir.
+        names = (
+            '.cipd_impl.ps1',
+            'cipd',
+            'cipd.bat',
+            'cipd_client_version',
+            'cipd_client_version.digests',
+        )
+        for f in names:
+            shutil.copy2(os.path.join(ROOT_DIR, f),
+                         os.path.join(self.tempdir, f))
+        if cipd_version is not None:
+            with open(os.path.join(self.tempdir, 'cipd_client_version'),
+                      'wt') as f:
+                f.write(cipd_version + '\n')
+        if digests is not None:
+            p = os.path.join(self.tempdir, 'cipd_client_version.digests')
+            with open(p, 'wt') as f:
+                f.write(digests + '\n')
+
+    def call_cipd_help(self):
+        """Calls 'cipd help' bootstrapping the client in tempdir.
 
     Returns (exit code, merged stdout and stderr).
     """
-    exe = 'cipd.bat' if sys.platform == 'win32' else 'cipd'
-    p = subprocess.Popen(
-        [os.path.join(self.tempdir, exe), 'help'],
-        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    out, _ = p.communicate()
-    return p.returncode, out
-
-  def test_new_bootstrap(self):
-    """Bootstrapping the client from scratch."""
-    self.stage_files()
-    ret, out = self.call_cipd_help()
-    if ret:
-      self.fail('Bootstrap from scratch failed:\n%s' % out)
-
-  def test_self_update(self):
-    """Updating the existing client in-place."""
-    self.stage_files(cipd_version=OLD_VERSION, digests=OLD_DIGESTS)
-    ret, out = self.call_cipd_help()
-    if ret:
-      self.fail('Update to %s fails:\n%s' % (OLD_VERSION, out))
-    self.stage_files()
-    ret, out = self.call_cipd_help()
-    if ret:
-      self.fail('Update from %s to the tip fails:\n%s' % (OLD_VERSION, out))
+        exe = 'cipd.bat' if sys.platform == 'win32' else 'cipd'
+        p = subprocess.Popen([os.path.join(self.tempdir, exe), 'help'],
+                             stdout=subprocess.PIPE,
+                             stderr=subprocess.STDOUT)
+        out, _ = p.communicate()
+        return p.returncode, out
+
+    def test_new_bootstrap(self):
+        """Bootstrapping the client from scratch."""
+        self.stage_files()
+        ret, out = self.call_cipd_help()
+        if ret:
+            self.fail('Bootstrap from scratch failed:\n%s' % out)
+
+    def test_self_update(self):
+        """Updating the existing client in-place."""
+        self.stage_files(cipd_version=OLD_VERSION, digests=OLD_DIGESTS)
+        ret, out = self.call_cipd_help()
+        if ret:
+            self.fail('Update to %s fails:\n%s' % (OLD_VERSION, out))
+        self.stage_files()
+        ret, out = self.call_cipd_help()
+        if ret:
+            self.fail('Update from %s to the tip fails:\n%s' %
+                      (OLD_VERSION, out))
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 34 - 34
tests/detect_host_arch_test.py

@@ -15,40 +15,40 @@ import detect_host_arch
 
 
 class DetectHostArchTest(unittest.TestCase):
-  def setUp(self):
-    super(DetectHostArchTest, self).setUp()
-    mock.patch('platform.machine').start()
-    mock.patch('platform.processor').start()
-    mock.patch('platform.architecture').start()
-    self.addCleanup(mock.patch.stopall)
-
-  def testHostArch(self):
-    test_cases = [
-      ('ia86', '', [''], 'x86'),
-      ('i86pc', '', [''], 'x86'),
-      ('x86_64', '', [''], 'x64'),
-      ('amd64', '', [''], 'x64'),
-      ('x86_64', '', ['32bit'], 'x86'),
-      ('amd64', '', ['32bit'], 'x86'),
-      ('arm', '', [''], 'arm'),
-      ('aarch64', '', [''], 'arm64'),
-      ('aarch64', '', ['32bit'], 'arm'),
-      ('arm64', '', [''], 'arm64'),
-      ('amd64', 'ARMv8 (64-bit) Family', ['64bit', 'WindowsPE'], 'x64'),
-      ('arm64', 'ARMv8 (64-bit) Family', ['32bit', 'WindowsPE'], 'x64'),
-      ('mips64', '', [''], 'mips64'),
-      ('mips', '', [''], 'mips'),
-      ('ppc', '', [''], 'ppc'),
-      ('foo', 'powerpc', [''], 'ppc'),
-      ('s390', '', [''], 's390'),
-    ]
-
-    for machine, processor, arch, expected in test_cases:
-      platform.machine.return_value = machine
-      platform.processor.return_value = processor
-      platform.architecture.return_value = arch
-      self.assertEqual(expected, detect_host_arch.HostArch())
+    def setUp(self):
+        super(DetectHostArchTest, self).setUp()
+        mock.patch('platform.machine').start()
+        mock.patch('platform.processor').start()
+        mock.patch('platform.architecture').start()
+        self.addCleanup(mock.patch.stopall)
+
+    def testHostArch(self):
+        test_cases = [
+            ('ia86', '', [''], 'x86'),
+            ('i86pc', '', [''], 'x86'),
+            ('x86_64', '', [''], 'x64'),
+            ('amd64', '', [''], 'x64'),
+            ('x86_64', '', ['32bit'], 'x86'),
+            ('amd64', '', ['32bit'], 'x86'),
+            ('arm', '', [''], 'arm'),
+            ('aarch64', '', [''], 'arm64'),
+            ('aarch64', '', ['32bit'], 'arm'),
+            ('arm64', '', [''], 'arm64'),
+            ('amd64', 'ARMv8 (64-bit) Family', ['64bit', 'WindowsPE'], 'x64'),
+            ('arm64', 'ARMv8 (64-bit) Family', ['32bit', 'WindowsPE'], 'x64'),
+            ('mips64', '', [''], 'mips64'),
+            ('mips', '', [''], 'mips'),
+            ('ppc', '', [''], 'ppc'),
+            ('foo', 'powerpc', [''], 'ppc'),
+            ('s390', '', [''], 's390'),
+        ]
+
+        for machine, processor, arch, expected in test_cases:
+            platform.machine.return_value = machine
+            platform.processor.return_value = processor
+            platform.architecture.return_value = arch
+            self.assertEqual(expected, detect_host_arch.HostArch())
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 451 - 445
tests/download_from_google_storage_unittest.py

@@ -3,7 +3,6 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 # pylint: disable=protected-access
-
 """Unit tests for download_from_google_storage.py."""
 
 from __future__ import print_function
@@ -19,7 +18,6 @@ import tempfile
 import threading
 import unittest
 
-
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 import upload_to_google_storage
@@ -27,464 +25,472 @@ import download_from_google_storage
 
 # ../third_party/gsutil/gsutil
 GSUTIL_DEFAULT_PATH = os.path.join(
-    os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
-    'gsutil.py')
+    os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'gsutil.py')
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
 
 
 class GsutilMock(object):
-  def __init__(self, path, boto_path, timeout=None):
-    self.path = path
-    self.timeout = timeout
-    self.boto_path = boto_path
-    self.expected = []
-    self.history = []
-    self.lock = threading.Lock()
-
-  def add_expected(self, return_code, out, err, fn=None):
-    self.expected.append((return_code, out, err, fn))
-
-  def append_history(self, method, args):
-    self.history.append((method, args))
-
-  def call(self, *args):
-    with self.lock:
-      self.append_history('call', args)
-      if self.expected:
-        code, _out, _err, fn = self.expected.pop(0)
-        if fn:
-          fn()
-        return code
-
-      return 0
-
-  def check_call(self, *args):
-    with self.lock:
-      self.append_history('check_call', args)
-      if self.expected:
-        code, out, err, fn = self.expected.pop(0)
-        if fn:
-          fn()
-        return code, out, err
-
-      return (0, '', '')
-
-  def check_call_with_retries(self, *args):
-    return self.check_call(*args)
+    def __init__(self, path, boto_path, timeout=None):
+        self.path = path
+        self.timeout = timeout
+        self.boto_path = boto_path
+        self.expected = []
+        self.history = []
+        self.lock = threading.Lock()
+
+    def add_expected(self, return_code, out, err, fn=None):
+        self.expected.append((return_code, out, err, fn))
+
+    def append_history(self, method, args):
+        self.history.append((method, args))
+
+    def call(self, *args):
+        with self.lock:
+            self.append_history('call', args)
+            if self.expected:
+                code, _out, _err, fn = self.expected.pop(0)
+                if fn:
+                    fn()
+                return code
+
+            return 0
+
+    def check_call(self, *args):
+        with self.lock:
+            self.append_history('check_call', args)
+            if self.expected:
+                code, out, err, fn = self.expected.pop(0)
+                if fn:
+                    fn()
+                return code, out, err
+
+            return (0, '', '')
+
+    def check_call_with_retries(self, *args):
+        return self.check_call(*args)
 
 
 class ChangedWorkingDirectory(object):
-  def __init__(self, working_directory):
-    self._old_cwd = ''
-    self._working_directory = working_directory
+    def __init__(self, working_directory):
+        self._old_cwd = ''
+        self._working_directory = working_directory
 
-  def __enter__(self):
-    self._old_cwd = os.getcwd()
-    print("Enter directory = ", self._working_directory)
-    os.chdir(self._working_directory)
+    def __enter__(self):
+        self._old_cwd = os.getcwd()
+        print("Enter directory = ", self._working_directory)
+        os.chdir(self._working_directory)
 
-  def __exit__(self, *_):
-    print("Enter directory = ", self._old_cwd)
-    os.chdir(self._old_cwd)
+    def __exit__(self, *_):
+        print("Enter directory = ", self._old_cwd)
+        os.chdir(self._old_cwd)
 
 
 class GstoolsUnitTests(unittest.TestCase):
-  def setUp(self):
-    self.temp_dir = tempfile.mkdtemp(prefix='gstools_test')
-    self.base_path = os.path.join(self.temp_dir, 'test_files')
-    shutil.copytree(os.path.join(TEST_DIR, 'gstools'), self.base_path)
-
-  def tearDown(self):
-    shutil.rmtree(self.temp_dir)
-
-  def test_validate_tar_file(self):
-    lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
-    with ChangedWorkingDirectory(self.base_path):
-      # Sanity ok check.
-      tar_dir = 'ok_dir'
-      os.makedirs(os.path.join(self.base_path, tar_dir))
-      tar = 'good.tar.gz'
-      lorem_ipsum_copy = os.path.join(tar_dir, 'lorem_ipsum.txt')
-      shutil.copyfile(lorem_ipsum, lorem_ipsum_copy)
-      with tarfile.open(tar, 'w:gz') as tar:
-        tar.add(lorem_ipsum_copy)
-        self.assertTrue(
-            download_from_google_storage._validate_tar_file(tar, tar_dir))
-
-      # os.symlink doesn't exist on Windows.
-      if sys.platform != 'win32':
-        # Test no links.
-        tar_dir_link = 'for_tar_link'
-        os.makedirs(tar_dir_link)
-        link = os.path.join(tar_dir_link, 'link')
-        os.symlink(lorem_ipsum, link)
-        tar_with_links = 'with_links.tar.gz'
-        with tarfile.open(tar_with_links, 'w:gz') as tar:
-          tar.add(link)
-          self.assertFalse(
-              download_from_google_storage._validate_tar_file(
-                  tar, tar_dir_link))
-
-      # Test not outside.
-      tar_dir_outside = 'outside_tar'
-      os.makedirs(tar_dir_outside)
-      tar_with_outside = 'with_outside.tar.gz'
-      with tarfile.open(tar_with_outside, 'w:gz') as tar:
-        tar.add(lorem_ipsum)
-        self.assertFalse(
-            download_from_google_storage._validate_tar_file(tar,
-                                                            tar_dir_outside))
-      # Test no ../
-      tar_with_dotdot = 'with_dotdot.tar.gz'
-      dotdot_file = os.path.join(tar_dir, '..', tar_dir, 'lorem_ipsum.txt')
-      with tarfile.open(tar_with_dotdot, 'w:gz') as tar:
-        tar.add(dotdot_file)
-        self.assertFalse(
-            download_from_google_storage._validate_tar_file(tar,
-                                                            tar_dir))
-      # Test normal file with .. in name okay
-      tar_with_hidden = 'with_normal_dotdot.tar.gz'
-      hidden_file = os.path.join(tar_dir, '..hidden_file.txt')
-      shutil.copyfile(lorem_ipsum, hidden_file)
-      with tarfile.open(tar_with_hidden, 'w:gz') as tar:
-        tar.add(hidden_file)
-        self.assertTrue(
-            download_from_google_storage._validate_tar_file(tar,
-                                                            tar_dir))
-
-  def test_gsutil(self):
-    # This will download a real gsutil package from Google Storage.
-    gsutil = download_from_google_storage.Gsutil(GSUTIL_DEFAULT_PATH, None)
-    self.assertEqual(gsutil.path, GSUTIL_DEFAULT_PATH)
-    code, _, err = gsutil.check_call()
-    self.assertEqual(code, 0, err)
-    self.assertEqual(err, '')
-
-  def test_get_sha1(self):
-    lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
-    self.assertEqual(
-        download_from_google_storage.get_sha1(lorem_ipsum),
-        '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
-
-  def test_get_md5(self):
-    lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
-    self.assertEqual(
-        upload_to_google_storage.get_md5(lorem_ipsum),
-        '634d7c1ed3545383837428f031840a1e')
-
-  def test_get_md5_cached_read(self):
-    lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
-    # Use a fake 'stale' MD5 sum.  Expected behavior is to return stale sum.
-    self.assertEqual(
-        upload_to_google_storage.get_md5_cached(lorem_ipsum),
-        '734d7c1ed3545383837428f031840a1e')
-
-  def test_get_md5_cached_write(self):
-    lorem_ipsum2 = os.path.join(self.base_path, 'lorem_ipsum2.txt')
-    lorem_ipsum2_md5 = os.path.join(self.base_path, 'lorem_ipsum2.txt.md5')
-    if os.path.exists(lorem_ipsum2_md5):
-      os.remove(lorem_ipsum2_md5)
-    # Use a fake 'stale' MD5 sum.  Expected behavior is to return stale sum.
-    self.assertEqual(
-        upload_to_google_storage.get_md5_cached(lorem_ipsum2),
-        '4c02d1eb455a0f22c575265d17b84b6d')
-    self.assertTrue(os.path.exists(lorem_ipsum2_md5))
-    self.assertEqual(
-        open(lorem_ipsum2_md5, 'rb').read().decode(),
-        '4c02d1eb455a0f22c575265d17b84b6d')
-    os.remove(lorem_ipsum2_md5)  # Clean up.
-    self.assertFalse(os.path.exists(lorem_ipsum2_md5))
+    def setUp(self):
+        self.temp_dir = tempfile.mkdtemp(prefix='gstools_test')
+        self.base_path = os.path.join(self.temp_dir, 'test_files')
+        shutil.copytree(os.path.join(TEST_DIR, 'gstools'), self.base_path)
+
+    def tearDown(self):
+        shutil.rmtree(self.temp_dir)
+
+    def test_validate_tar_file(self):
+        lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
+        with ChangedWorkingDirectory(self.base_path):
+            # Sanity ok check.
+            tar_dir = 'ok_dir'
+            os.makedirs(os.path.join(self.base_path, tar_dir))
+            tar = 'good.tar.gz'
+            lorem_ipsum_copy = os.path.join(tar_dir, 'lorem_ipsum.txt')
+            shutil.copyfile(lorem_ipsum, lorem_ipsum_copy)
+            with tarfile.open(tar, 'w:gz') as tar:
+                tar.add(lorem_ipsum_copy)
+                self.assertTrue(
+                    download_from_google_storage._validate_tar_file(
+                        tar, tar_dir))
+
+            # os.symlink doesn't exist on Windows.
+            if sys.platform != 'win32':
+                # Test no links.
+                tar_dir_link = 'for_tar_link'
+                os.makedirs(tar_dir_link)
+                link = os.path.join(tar_dir_link, 'link')
+                os.symlink(lorem_ipsum, link)
+                tar_with_links = 'with_links.tar.gz'
+                with tarfile.open(tar_with_links, 'w:gz') as tar:
+                    tar.add(link)
+                    self.assertFalse(
+                        download_from_google_storage._validate_tar_file(
+                            tar, tar_dir_link))
+
+            # Test not outside.
+            tar_dir_outside = 'outside_tar'
+            os.makedirs(tar_dir_outside)
+            tar_with_outside = 'with_outside.tar.gz'
+            with tarfile.open(tar_with_outside, 'w:gz') as tar:
+                tar.add(lorem_ipsum)
+                self.assertFalse(
+                    download_from_google_storage._validate_tar_file(
+                        tar, tar_dir_outside))
+            # Test no ../
+            tar_with_dotdot = 'with_dotdot.tar.gz'
+            dotdot_file = os.path.join(tar_dir, '..', tar_dir,
+                                       'lorem_ipsum.txt')
+            with tarfile.open(tar_with_dotdot, 'w:gz') as tar:
+                tar.add(dotdot_file)
+                self.assertFalse(
+                    download_from_google_storage._validate_tar_file(
+                        tar, tar_dir))
+            # Test normal file with .. in name okay
+            tar_with_hidden = 'with_normal_dotdot.tar.gz'
+            hidden_file = os.path.join(tar_dir, '..hidden_file.txt')
+            shutil.copyfile(lorem_ipsum, hidden_file)
+            with tarfile.open(tar_with_hidden, 'w:gz') as tar:
+                tar.add(hidden_file)
+                self.assertTrue(
+                    download_from_google_storage._validate_tar_file(
+                        tar, tar_dir))
+
+    def test_gsutil(self):
+        # This will download a real gsutil package from Google Storage.
+        gsutil = download_from_google_storage.Gsutil(GSUTIL_DEFAULT_PATH, None)
+        self.assertEqual(gsutil.path, GSUTIL_DEFAULT_PATH)
+        code, _, err = gsutil.check_call()
+        self.assertEqual(code, 0, err)
+        self.assertEqual(err, '')
+
+    def test_get_sha1(self):
+        lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
+        self.assertEqual(download_from_google_storage.get_sha1(lorem_ipsum),
+                         '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
+
+    def test_get_md5(self):
+        lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
+        self.assertEqual(upload_to_google_storage.get_md5(lorem_ipsum),
+                         '634d7c1ed3545383837428f031840a1e')
+
+    def test_get_md5_cached_read(self):
+        lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
+        # Use a fake 'stale' MD5 sum.  Expected behavior is to return stale sum.
+        self.assertEqual(upload_to_google_storage.get_md5_cached(lorem_ipsum),
+                         '734d7c1ed3545383837428f031840a1e')
+
+    def test_get_md5_cached_write(self):
+        lorem_ipsum2 = os.path.join(self.base_path, 'lorem_ipsum2.txt')
+        lorem_ipsum2_md5 = os.path.join(self.base_path, 'lorem_ipsum2.txt.md5')
+        if os.path.exists(lorem_ipsum2_md5):
+            os.remove(lorem_ipsum2_md5)
+        # Use a fake 'stale' MD5 sum.  Expected behavior is to return stale sum.
+        self.assertEqual(upload_to_google_storage.get_md5_cached(lorem_ipsum2),
+                         '4c02d1eb455a0f22c575265d17b84b6d')
+        self.assertTrue(os.path.exists(lorem_ipsum2_md5))
+        self.assertEqual(
+            open(lorem_ipsum2_md5, 'rb').read().decode(),
+            '4c02d1eb455a0f22c575265d17b84b6d')
+        os.remove(lorem_ipsum2_md5)  # Clean up.
+        self.assertFalse(os.path.exists(lorem_ipsum2_md5))
 
 
 class DownloadTests(unittest.TestCase):
-  def setUp(self):
-    self.gsutil = GsutilMock(GSUTIL_DEFAULT_PATH, None)
-    self.temp_dir = tempfile.mkdtemp(prefix='gstools_test')
-    self.checkout_test_files = os.path.join(
-        TEST_DIR, 'gstools', 'download_test_data')
-    self.base_path = os.path.join(
-        self.temp_dir, 'download_test_data')
-    shutil.copytree(self.checkout_test_files, self.base_path)
-    self.base_url = 'gs://sometesturl'
-    self.parser = optparse.OptionParser()
-    self.queue = queue.Queue()
-    self.ret_codes = queue.Queue()
-    self.lorem_ipsum = os.path.join(TEST_DIR, 'gstools', 'lorem_ipsum.txt')
-    self.lorem_ipsum_sha1 = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
-    self.maxDiff = None
-
-  def tearDown(self):
-    shutil.rmtree(self.temp_dir)
-
-  def test_enumerate_files_non_recursive(self):
-    for item in download_from_google_storage.enumerate_input(
-        self.base_path, True, False, False, None, False, False):
-      self.queue.put(item)
-    expected_queue = [
-        ('e6c4fbd4fe7607f3e6ebf68b2ea4ef694da7b4fe',
-            os.path.join(self.base_path, 'rootfolder_text.txt')),
-       ('7871c8e24da15bad8b0be2c36edc9dc77e37727f',
-            os.path.join(self.base_path, 'uploaded_lorem_ipsum.txt'))]
-    self.assertEqual(sorted(expected_queue), sorted(self.queue.queue))
-
-  def test_enumerate_files_recursive(self):
-    for item in download_from_google_storage.enumerate_input(
-        self.base_path, True, True, False, None, False, False):
-      self.queue.put(item)
-    expected_queue = [
-        ('e6c4fbd4fe7607f3e6ebf68b2ea4ef694da7b4fe',
-            os.path.join(self.base_path, 'rootfolder_text.txt')),
-        ('7871c8e24da15bad8b0be2c36edc9dc77e37727f',
-            os.path.join(self.base_path, 'uploaded_lorem_ipsum.txt')),
-        ('b5415aa0b64006a95c0c409182e628881d6d6463',
-            os.path.join(self.base_path, 'subfolder', 'subfolder_text.txt'))]
-    self.assertEqual(sorted(expected_queue), sorted(self.queue.queue))
-
-  def test_download_worker_single_file(self):
-    sha1_hash = self.lorem_ipsum_sha1
-    input_filename = '%s/%s' % (self.base_url, sha1_hash)
-    output_filename = os.path.join(self.base_path, 'uploaded_lorem_ipsum.txt')
-    self.gsutil.add_expected(0, '', '', lambda: shutil.copyfile(
-        self.lorem_ipsum, output_filename))  # cp
-    self.queue.put((sha1_hash, output_filename))
-    self.queue.put((None, None))
-    stdout_queue = queue.Queue()
-    download_from_google_storage._downloader_worker_thread(
-        0, self.queue, False, self.base_url, self.gsutil,
-        stdout_queue, self.ret_codes, True, False)
-    expected_calls = [
-        ('check_call',
-            ('cp', input_filename, output_filename))]
-    sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
-    if sys.platform != 'win32':
-      expected_calls.append(
-          ('check_call', ('stat', 'gs://sometesturl/' + sha1_hash)))
-    expected_output = ['0> Downloading %s@%s...' % (output_filename, sha1_hash)]
-    expected_ret_codes = []
-    self.assertEqual(list(stdout_queue.queue), expected_output)
-    self.assertEqual(self.gsutil.history, expected_calls)
-    self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)
-
-  def test_download_worker_skips_file(self):
-    sha1_hash = 'e6c4fbd4fe7607f3e6ebf68b2ea4ef694da7b4fe'
-    output_filename = os.path.join(self.base_path, 'rootfolder_text.txt')
-    self.queue.put((sha1_hash, output_filename))
-    self.queue.put((None, None))
-    stdout_queue = queue.Queue()
-    download_from_google_storage._downloader_worker_thread(
-        0, self.queue, False, self.base_url, self.gsutil,
-        stdout_queue, self.ret_codes, True, False)
-    # dfgs does not output anything in the no-op case.
-    self.assertEqual(list(stdout_queue.queue), [])
-    self.assertEqual(self.gsutil.history, [])
-
-  def test_download_extract_archive(self):
-    # Generate a gzipped tarfile
-    output_filename = os.path.join(self.base_path, 'subfolder.tar.gz')
-    output_dirname = os.path.join(self.base_path, 'subfolder')
-    extracted_filename = os.path.join(output_dirname, 'subfolder_text.txt')
-    with tarfile.open(output_filename, 'w:gz') as tar:
-      tar.add(output_dirname, arcname='subfolder')
-    shutil.rmtree(output_dirname)
-    sha1_hash = download_from_google_storage.get_sha1(output_filename)
-    input_filename = '%s/%s' % (self.base_url, sha1_hash)
-
-    # Initial download
-    self.queue.put((sha1_hash, output_filename))
-    self.queue.put((None, None))
-    stdout_queue = queue.Queue()
-    download_from_google_storage._downloader_worker_thread(
-        0, self.queue, True, self.base_url, self.gsutil,
-        stdout_queue, self.ret_codes, True, True, delete=False)
-    expected_calls = [
-        ('check_call',
-            ('cp', input_filename, output_filename))]
-    if sys.platform != 'win32':
-      expected_calls.append(
-          ('check_call',
-           ('stat',
-            'gs://sometesturl/%s' % sha1_hash)))
-    expected_output = ['0> Downloading %s@%s...' % (output_filename, sha1_hash)]
-    expected_output.extend([
-        '0> Extracting 3 entries from %s to %s' % (output_filename,
-                                                   output_dirname)])
-    expected_ret_codes = []
-    self.assertEqual(list(stdout_queue.queue), expected_output)
-    self.assertEqual(self.gsutil.history, expected_calls)
-    self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)
-    self.assertTrue(os.path.exists(output_dirname))
-    self.assertTrue(os.path.exists(extracted_filename))
-
-    # Test noop download
-    self.queue.put((sha1_hash, output_filename))
-    self.queue.put((None, None))
-    stdout_queue = queue.Queue()
-    download_from_google_storage._downloader_worker_thread(0,
-                                                           self.queue,
-                                                           False,
-                                                           self.base_url,
-                                                           self.gsutil,
-                                                           stdout_queue,
-                                                           self.ret_codes,
-                                                           True,
-                                                           True,
-                                                           delete=False)
-
-    self.assertEqual(list(stdout_queue.queue), [])
-    self.assertEqual(self.gsutil.history, expected_calls)
-    self.assertEqual(list(self.ret_codes.queue), [])
-    self.assertTrue(os.path.exists(output_dirname))
-    self.assertTrue(os.path.exists(extracted_filename))
-
-    # With dirty flag file, previous extraction wasn't complete
-    with open(os.path.join(self.base_path, 'subfolder.tmp'), 'a'):
-      pass
-
-    self.queue.put((sha1_hash, output_filename))
-    self.queue.put((None, None))
-    stdout_queue = queue.Queue()
-    download_from_google_storage._downloader_worker_thread(0,
-                                                           self.queue,
-                                                           False,
-                                                           self.base_url,
-                                                           self.gsutil,
-                                                           stdout_queue,
-                                                           self.ret_codes,
-                                                           True,
-                                                           True,
-                                                           delete=False)
-    expected_calls += [('check_call', ('cp', input_filename, output_filename))]
-    if sys.platform != 'win32':
-      expected_calls.append(
-          ('check_call', ('stat', 'gs://sometesturl/%s' % sha1_hash)))
-    expected_output = [
-        '0> Detected tmp flag file for %s, re-downloading...' %
-        (output_filename),
-        '0> Downloading %s@%s...' % (output_filename, sha1_hash),
-        '0> Removed %s...' % (output_dirname),
-        '0> Extracting 3 entries from %s to %s' %
-        (output_filename, output_dirname),
-    ]
-    expected_ret_codes = []
-    self.assertEqual(list(stdout_queue.queue), expected_output)
-    self.assertEqual(self.gsutil.history, expected_calls)
-    self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)
-    self.assertTrue(os.path.exists(output_dirname))
-    self.assertTrue(os.path.exists(extracted_filename))
-
-  def test_download_worker_skips_not_found_file(self):
-    sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
-    input_filename = '%s/%s' % (self.base_url, sha1_hash)
-    output_filename = os.path.join(self.base_path, 'uploaded_lorem_ipsum.txt')
-    self.queue.put((sha1_hash, output_filename))
-    self.queue.put((None, None))
-    stdout_queue = queue.Queue()
-    self.gsutil.add_expected(1, '', '')  # Return error when 'cp' is called.
-    download_from_google_storage._downloader_worker_thread(
-        0, self.queue, False, self.base_url, self.gsutil,
-        stdout_queue, self.ret_codes, True, False)
-    expected_output = [
-        '0> Downloading %s@%s...' % (output_filename, sha1_hash),
-        '0> Failed to fetch file %s for %s, skipping. [Err: ]' % (
-            input_filename, output_filename),
-    ]
-    expected_calls = [
-        ('check_call',
-            ('cp', input_filename, output_filename))
-    ]
-    expected_ret_codes = [
-        (1, 'Failed to fetch file %s for %s. [Err: ]' % (
-            input_filename, output_filename))
-    ]
-    self.assertEqual(list(stdout_queue.queue), expected_output)
-    self.assertEqual(self.gsutil.history, expected_calls)
-    self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)
-
-  def test_download_cp_fails(self):
-    sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
-    input_filename = '%s/%s' % (self.base_url, sha1_hash)
-    output_filename = os.path.join(self.base_path, 'uploaded_lorem_ipsum.txt')
-    self.gsutil.add_expected(101, '', 'Test error message.') # cp
-    code = download_from_google_storage.download_from_google_storage(
-        input_filename=sha1_hash,
-        base_url=self.base_url,
-        gsutil=self.gsutil,
-        num_threads=1,
-        directory=False,
-        recursive=False,
-        force=True,
-        output=output_filename,
-        ignore_errors=False,
-        sha1_file=False,
-        verbose=True,
-        auto_platform=False,
-        extract=False)
-    expected_calls = [
-        ('check_call',
-            ('cp', input_filename, output_filename))
-    ]
-    self.assertEqual(self.gsutil.history, expected_calls)
-    self.assertEqual(code, 101)
-
-  def test_corrupt_download(self):
-    q = queue.Queue()
-    out_q = queue.Queue()
-    ret_codes = queue.Queue()
-    tmp_dir = tempfile.mkdtemp()
-    sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
-    output_filename = os.path.join(tmp_dir, 'lorem_ipsum.txt')
-    q.put(('7871c8e24da15bad8b0be2c36edc9dc77e37727f', output_filename))
-    q.put((None, None))
-    def _write_bad_file():
-      with open(output_filename, 'w') as f:
-        f.write('foobar')
-    self.gsutil.add_expected(0, '', '', _write_bad_file) # cp
-    download_from_google_storage._downloader_worker_thread(
-        1, q, True, self.base_url, self.gsutil, out_q, ret_codes, True, False)
-    self.assertTrue(q.empty())
-    msg = ('1> ERROR remote sha1 (%s) does not match expected sha1 (%s).' %
-           ('8843d7f92416211de9ebb963ff4ce28125932878', sha1_hash))
-    self.assertEqual(out_q.get(),
-                     '1> Downloading %s@%s...' % (output_filename, sha1_hash))
-    self.assertEqual(out_q.get(), msg)
-    self.assertEqual(ret_codes.get(), (20, msg))
-    self.assertTrue(out_q.empty())
-    self.assertTrue(ret_codes.empty())
-
-
-  def test_download_directory_no_recursive_non_force(self):
-    sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
-    input_filename = '%s/%s' % (self.base_url, sha1_hash)
-    output_filename = os.path.join(self.base_path, 'uploaded_lorem_ipsum.txt')
-    self.gsutil.add_expected(0, '', '')  # version
-    self.gsutil.add_expected(0, '', '', lambda: shutil.copyfile(
-        self.lorem_ipsum, output_filename))  # cp
-    code = download_from_google_storage.download_from_google_storage(
-        input_filename=self.base_path,
-        base_url=self.base_url,
-        gsutil=self.gsutil,
-        num_threads=1,
-        directory=True,
-        recursive=False,
-        force=False,
-        output=None,
-        ignore_errors=False,
-        sha1_file=False,
-        verbose=True,
-        auto_platform=False,
-        extract=False)
-    expected_calls = [
-        ('check_call', ('version',)),
-        ('check_call',
-            ('cp', input_filename, output_filename))]
-    if sys.platform != 'win32':
-      expected_calls.append(
-          ('check_call',
-           ('stat',
-            'gs://sometesturl/7871c8e24da15bad8b0be2c36edc9dc77e37727f')))
-    self.assertEqual(self.gsutil.history, expected_calls)
-    self.assertEqual(code, 0)
+    def setUp(self):
+        self.gsutil = GsutilMock(GSUTIL_DEFAULT_PATH, None)
+        self.temp_dir = tempfile.mkdtemp(prefix='gstools_test')
+        self.checkout_test_files = os.path.join(TEST_DIR, 'gstools',
+                                                'download_test_data')
+        self.base_path = os.path.join(self.temp_dir, 'download_test_data')
+        shutil.copytree(self.checkout_test_files, self.base_path)
+        self.base_url = 'gs://sometesturl'
+        self.parser = optparse.OptionParser()
+        self.queue = queue.Queue()
+        self.ret_codes = queue.Queue()
+        self.lorem_ipsum = os.path.join(TEST_DIR, 'gstools', 'lorem_ipsum.txt')
+        self.lorem_ipsum_sha1 = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
+        self.maxDiff = None
+
+    def tearDown(self):
+        shutil.rmtree(self.temp_dir)
+
+    def test_enumerate_files_non_recursive(self):
+        for item in download_from_google_storage.enumerate_input(
+                self.base_path, True, False, False, None, False, False):
+            self.queue.put(item)
+        expected_queue = [('e6c4fbd4fe7607f3e6ebf68b2ea4ef694da7b4fe',
+                           os.path.join(self.base_path, 'rootfolder_text.txt')),
+                          ('7871c8e24da15bad8b0be2c36edc9dc77e37727f',
+                           os.path.join(self.base_path,
+                                        'uploaded_lorem_ipsum.txt'))]
+        self.assertEqual(sorted(expected_queue), sorted(self.queue.queue))
+
+    def test_enumerate_files_recursive(self):
+        for item in download_from_google_storage.enumerate_input(
+                self.base_path, True, True, False, None, False, False):
+            self.queue.put(item)
+        expected_queue = [('e6c4fbd4fe7607f3e6ebf68b2ea4ef694da7b4fe',
+                           os.path.join(self.base_path, 'rootfolder_text.txt')),
+                          ('7871c8e24da15bad8b0be2c36edc9dc77e37727f',
+                           os.path.join(self.base_path,
+                                        'uploaded_lorem_ipsum.txt')),
+                          ('b5415aa0b64006a95c0c409182e628881d6d6463',
+                           os.path.join(self.base_path, 'subfolder',
+                                        'subfolder_text.txt'))]
+        self.assertEqual(sorted(expected_queue), sorted(self.queue.queue))
+
+    def test_download_worker_single_file(self):
+        sha1_hash = self.lorem_ipsum_sha1
+        input_filename = '%s/%s' % (self.base_url, sha1_hash)
+        output_filename = os.path.join(self.base_path,
+                                       'uploaded_lorem_ipsum.txt')
+        self.gsutil.add_expected(
+            0, '', '',
+            lambda: shutil.copyfile(self.lorem_ipsum, output_filename))  # cp
+        self.queue.put((sha1_hash, output_filename))
+        self.queue.put((None, None))
+        stdout_queue = queue.Queue()
+        download_from_google_storage._downloader_worker_thread(
+            0, self.queue, False, self.base_url, self.gsutil, stdout_queue,
+            self.ret_codes, True, False)
+        expected_calls = [('check_call', ('cp', input_filename,
+                                          output_filename))]
+        sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
+        if sys.platform != 'win32':
+            expected_calls.append(
+                ('check_call', ('stat', 'gs://sometesturl/' + sha1_hash)))
+        expected_output = [
+            '0> Downloading %s@%s...' % (output_filename, sha1_hash)
+        ]
+        expected_ret_codes = []
+        self.assertEqual(list(stdout_queue.queue), expected_output)
+        self.assertEqual(self.gsutil.history, expected_calls)
+        self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)
+
+    def test_download_worker_skips_file(self):
+        sha1_hash = 'e6c4fbd4fe7607f3e6ebf68b2ea4ef694da7b4fe'
+        output_filename = os.path.join(self.base_path, 'rootfolder_text.txt')
+        self.queue.put((sha1_hash, output_filename))
+        self.queue.put((None, None))
+        stdout_queue = queue.Queue()
+        download_from_google_storage._downloader_worker_thread(
+            0, self.queue, False, self.base_url, self.gsutil, stdout_queue,
+            self.ret_codes, True, False)
+        # dfgs does not output anything in the no-op case.
+        self.assertEqual(list(stdout_queue.queue), [])
+        self.assertEqual(self.gsutil.history, [])
+
+    def test_download_extract_archive(self):
+        # Generate a gzipped tarfile
+        output_filename = os.path.join(self.base_path, 'subfolder.tar.gz')
+        output_dirname = os.path.join(self.base_path, 'subfolder')
+        extracted_filename = os.path.join(output_dirname, 'subfolder_text.txt')
+        with tarfile.open(output_filename, 'w:gz') as tar:
+            tar.add(output_dirname, arcname='subfolder')
+        shutil.rmtree(output_dirname)
+        sha1_hash = download_from_google_storage.get_sha1(output_filename)
+        input_filename = '%s/%s' % (self.base_url, sha1_hash)
+
+        # Initial download
+        self.queue.put((sha1_hash, output_filename))
+        self.queue.put((None, None))
+        stdout_queue = queue.Queue()
+        download_from_google_storage._downloader_worker_thread(0,
+                                                               self.queue,
+                                                               True,
+                                                               self.base_url,
+                                                               self.gsutil,
+                                                               stdout_queue,
+                                                               self.ret_codes,
+                                                               True,
+                                                               True,
+                                                               delete=False)
+        expected_calls = [('check_call', ('cp', input_filename,
+                                          output_filename))]
+        if sys.platform != 'win32':
+            expected_calls.append(
+                ('check_call', ('stat', 'gs://sometesturl/%s' % sha1_hash)))
+        expected_output = [
+            '0> Downloading %s@%s...' % (output_filename, sha1_hash)
+        ]
+        expected_output.extend([
+            '0> Extracting 3 entries from %s to %s' %
+            (output_filename, output_dirname)
+        ])
+        expected_ret_codes = []
+        self.assertEqual(list(stdout_queue.queue), expected_output)
+        self.assertEqual(self.gsutil.history, expected_calls)
+        self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)
+        self.assertTrue(os.path.exists(output_dirname))
+        self.assertTrue(os.path.exists(extracted_filename))
+
+        # Test noop download
+        self.queue.put((sha1_hash, output_filename))
+        self.queue.put((None, None))
+        stdout_queue = queue.Queue()
+        download_from_google_storage._downloader_worker_thread(0,
+                                                               self.queue,
+                                                               False,
+                                                               self.base_url,
+                                                               self.gsutil,
+                                                               stdout_queue,
+                                                               self.ret_codes,
+                                                               True,
+                                                               True,
+                                                               delete=False)
+
+        self.assertEqual(list(stdout_queue.queue), [])
+        self.assertEqual(self.gsutil.history, expected_calls)
+        self.assertEqual(list(self.ret_codes.queue), [])
+        self.assertTrue(os.path.exists(output_dirname))
+        self.assertTrue(os.path.exists(extracted_filename))
+
+        # With dirty flag file, previous extraction wasn't complete
+        with open(os.path.join(self.base_path, 'subfolder.tmp'), 'a'):
+            pass
+
+        self.queue.put((sha1_hash, output_filename))
+        self.queue.put((None, None))
+        stdout_queue = queue.Queue()
+        download_from_google_storage._downloader_worker_thread(0,
+                                                               self.queue,
+                                                               False,
+                                                               self.base_url,
+                                                               self.gsutil,
+                                                               stdout_queue,
+                                                               self.ret_codes,
+                                                               True,
+                                                               True,
+                                                               delete=False)
+        expected_calls += [('check_call', ('cp', input_filename,
+                                           output_filename))]
+        if sys.platform != 'win32':
+            expected_calls.append(
+                ('check_call', ('stat', 'gs://sometesturl/%s' % sha1_hash)))
+        expected_output = [
+            '0> Detected tmp flag file for %s, re-downloading...' %
+            (output_filename),
+            '0> Downloading %s@%s...' % (output_filename, sha1_hash),
+            '0> Removed %s...' % (output_dirname),
+            '0> Extracting 3 entries from %s to %s' %
+            (output_filename, output_dirname),
+        ]
+        expected_ret_codes = []
+        self.assertEqual(list(stdout_queue.queue), expected_output)
+        self.assertEqual(self.gsutil.history, expected_calls)
+        self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)
+        self.assertTrue(os.path.exists(output_dirname))
+        self.assertTrue(os.path.exists(extracted_filename))
+
+    def test_download_worker_skips_not_found_file(self):
+        sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
+        input_filename = '%s/%s' % (self.base_url, sha1_hash)
+        output_filename = os.path.join(self.base_path,
+                                       'uploaded_lorem_ipsum.txt')
+        self.queue.put((sha1_hash, output_filename))
+        self.queue.put((None, None))
+        stdout_queue = queue.Queue()
+        self.gsutil.add_expected(1, '', '')  # Return error when 'cp' is called.
+        download_from_google_storage._downloader_worker_thread(
+            0, self.queue, False, self.base_url, self.gsutil, stdout_queue,
+            self.ret_codes, True, False)
+        expected_output = [
+            '0> Downloading %s@%s...' % (output_filename, sha1_hash),
+            '0> Failed to fetch file %s for %s, skipping. [Err: ]' %
+            (input_filename, output_filename),
+        ]
+        expected_calls = [('check_call', ('cp', input_filename,
+                                          output_filename))]
+        expected_ret_codes = [(1, 'Failed to fetch file %s for %s. [Err: ]' %
+                               (input_filename, output_filename))]
+        self.assertEqual(list(stdout_queue.queue), expected_output)
+        self.assertEqual(self.gsutil.history, expected_calls)
+        self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)
+
+    def test_download_cp_fails(self):
+        sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
+        input_filename = '%s/%s' % (self.base_url, sha1_hash)
+        output_filename = os.path.join(self.base_path,
+                                       'uploaded_lorem_ipsum.txt')
+        self.gsutil.add_expected(101, '', 'Test error message.')  # cp
+        code = download_from_google_storage.download_from_google_storage(
+            input_filename=sha1_hash,
+            base_url=self.base_url,
+            gsutil=self.gsutil,
+            num_threads=1,
+            directory=False,
+            recursive=False,
+            force=True,
+            output=output_filename,
+            ignore_errors=False,
+            sha1_file=False,
+            verbose=True,
+            auto_platform=False,
+            extract=False)
+        expected_calls = [('check_call', ('cp', input_filename,
+                                          output_filename))]
+        self.assertEqual(self.gsutil.history, expected_calls)
+        self.assertEqual(code, 101)
+
+    def test_corrupt_download(self):
+        q = queue.Queue()
+        out_q = queue.Queue()
+        ret_codes = queue.Queue()
+        tmp_dir = tempfile.mkdtemp()
+        sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
+        output_filename = os.path.join(tmp_dir, 'lorem_ipsum.txt')
+        q.put(('7871c8e24da15bad8b0be2c36edc9dc77e37727f', output_filename))
+        q.put((None, None))
+
+        def _write_bad_file():
+            with open(output_filename, 'w') as f:
+                f.write('foobar')
+
+        self.gsutil.add_expected(0, '', '', _write_bad_file)  # cp
+        download_from_google_storage._downloader_worker_thread(
+            1, q, True, self.base_url, self.gsutil, out_q, ret_codes, True,
+            False)
+        self.assertTrue(q.empty())
+        msg = ('1> ERROR remote sha1 (%s) does not match expected sha1 (%s).' %
+               ('8843d7f92416211de9ebb963ff4ce28125932878', sha1_hash))
+        self.assertEqual(
+            out_q.get(),
+            '1> Downloading %s@%s...' % (output_filename, sha1_hash))
+        self.assertEqual(out_q.get(), msg)
+        self.assertEqual(ret_codes.get(), (20, msg))
+        self.assertTrue(out_q.empty())
+        self.assertTrue(ret_codes.empty())
+
+    def test_download_directory_no_recursive_non_force(self):
+        sha1_hash = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
+        input_filename = '%s/%s' % (self.base_url, sha1_hash)
+        output_filename = os.path.join(self.base_path,
+                                       'uploaded_lorem_ipsum.txt')
+        self.gsutil.add_expected(0, '', '')  # version
+        self.gsutil.add_expected(
+            0, '', '',
+            lambda: shutil.copyfile(self.lorem_ipsum, output_filename))  # cp
+        code = download_from_google_storage.download_from_google_storage(
+            input_filename=self.base_path,
+            base_url=self.base_url,
+            gsutil=self.gsutil,
+            num_threads=1,
+            directory=True,
+            recursive=False,
+            force=False,
+            output=None,
+            ignore_errors=False,
+            sha1_file=False,
+            verbose=True,
+            auto_platform=False,
+            extract=False)
+        expected_calls = [('check_call', ('version', )),
+                          ('check_call', ('cp', input_filename,
+                                          output_filename))]
+        if sys.platform != 'win32':
+            expected_calls.append(
+                ('check_call',
+                 ('stat',
+                  'gs://sometesturl/7871c8e24da15bad8b0be2c36edc9dc77e37727f')))
+        self.assertEqual(self.gsutil.history, expected_calls)
+        self.assertEqual(code, 0)
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 230 - 224
tests/fetch_test.py

@@ -22,249 +22,255 @@ import fetch
 
 
 class SystemExitMock(Exception):
-  pass
+    pass
+
 
 class TestUtilityFunctions(unittest.TestCase):
-  """This test case is against utility functions"""
-
-  def _usage_static_message(self, stdout):
-    valid_fetch_config_text = 'Valid fetch configs:'
-    self.assertIn(valid_fetch_config_text, stdout)
-
-    # split[0] contains static text, whereas split[1] contains list of configs
-    split = stdout.split(valid_fetch_config_text)
-    self.assertEqual(2, len(split))
-
-    # verify a few fetch_configs
-    self.assertIn('foo', split[1])
-    self.assertNotIn('bar', split[1])
-
-  def test_handle_args_valid_usage(self):
-    response = fetch.handle_args(['filename', 'foo'])
-    self.assertEqual(
-        argparse.Namespace(dry_run=False,
-                           nohooks=False,
-                           nohistory=False,
-                           force=False,
-                           config='foo',
-                           protocol_override=None,
-                           props=[]), response)
-
-    response = fetch.handle_args([
-        'filename', '-n', '--dry-run', '--nohooks', '--no-history', '--force',
-        '--protocol-override', 'sso', 'foo', '--some-param=1', '--bar=2'
-    ])
-    self.assertEqual(
-        argparse.Namespace(dry_run=True,
-                           nohooks=True,
-                           nohistory=True,
-                           force=True,
-                           config='foo',
-                           protocol_override='sso',
-                           props=['--some-param=1', '--bar=2']), response)
-
-    response = fetch.handle_args([
-        'filename', '-n', '--dry-run', '--no-hooks', '--nohistory', '--force',
-        '-p', 'sso', 'foo', '--some-param=1', '--bar=2'
-    ])
-    self.assertEqual(
-        argparse.Namespace(dry_run=True,
-                           nohooks=True,
-                           nohistory=True,
-                           force=True,
-                           config='foo',
-                           protocol_override='sso',
-                           props=['--some-param=1', '--bar=2']), response)
-
-  @mock.patch('os.path.exists', return_value=False)
-  @mock.patch('sys.stdout', StringIO())
-  @mock.patch('sys.exit', side_effect=SystemExitMock)
-  def test_run_config_fetch_not_found(self, exit_mock, exists):
-    with self.assertRaises(SystemExitMock):
-      fetch.run_config_fetch('foo', [])
-    exit_mock.assert_called_with(1)
-    exists.assert_called_once()
-
-    self.assertEqual(1, len(exists.call_args[0]))
-    self.assertTrue(exists.call_args[0][0].endswith('foo.py'))
-
-    stdout = sys.stdout.getvalue()
-    self.assertEqual('Could not find a config for foo\n', stdout)
-
-  def test_run_config_fetch_integration(self):
-    config = fetch.run_config_fetch('depot_tools', [])
-    url = 'https://chromium.googlesource.com/chromium/tools/depot_tools.git'
-    spec = {
-        'type': 'gclient_git',
-        'gclient_git_spec': {
-            'solutions': [{
-                'url': url,
-                'managed': False,
-                'name': 'depot_tools',
-                'deps_file': 'DEPS',
-            }],
+    """This test case is against utility functions"""
+    def _usage_static_message(self, stdout):
+        valid_fetch_config_text = 'Valid fetch configs:'
+        self.assertIn(valid_fetch_config_text, stdout)
+
+        # split[0] contains static text, whereas split[1] contains list of
+        # configs
+        split = stdout.split(valid_fetch_config_text)
+        self.assertEqual(2, len(split))
+
+        # verify a few fetch_configs
+        self.assertIn('foo', split[1])
+        self.assertNotIn('bar', split[1])
+
+    def test_handle_args_valid_usage(self):
+        response = fetch.handle_args(['filename', 'foo'])
+        self.assertEqual(
+            argparse.Namespace(dry_run=False,
+                               nohooks=False,
+                               nohistory=False,
+                               force=False,
+                               config='foo',
+                               protocol_override=None,
+                               props=[]), response)
+
+        response = fetch.handle_args([
+            'filename', '-n', '--dry-run', '--nohooks', '--no-history',
+            '--force', '--protocol-override', 'sso', 'foo', '--some-param=1',
+            '--bar=2'
+        ])
+        self.assertEqual(
+            argparse.Namespace(dry_run=True,
+                               nohooks=True,
+                               nohistory=True,
+                               force=True,
+                               config='foo',
+                               protocol_override='sso',
+                               props=['--some-param=1', '--bar=2']), response)
+
+        response = fetch.handle_args([
+            'filename', '-n', '--dry-run', '--no-hooks', '--nohistory',
+            '--force', '-p', 'sso', 'foo', '--some-param=1', '--bar=2'
+        ])
+        self.assertEqual(
+            argparse.Namespace(dry_run=True,
+                               nohooks=True,
+                               nohistory=True,
+                               force=True,
+                               config='foo',
+                               protocol_override='sso',
+                               props=['--some-param=1', '--bar=2']), response)
+
+    @mock.patch('os.path.exists', return_value=False)
+    @mock.patch('sys.stdout', StringIO())
+    @mock.patch('sys.exit', side_effect=SystemExitMock)
+    def test_run_config_fetch_not_found(self, exit_mock, exists):
+        with self.assertRaises(SystemExitMock):
+            fetch.run_config_fetch('foo', [])
+        exit_mock.assert_called_with(1)
+        exists.assert_called_once()
+
+        self.assertEqual(1, len(exists.call_args[0]))
+        self.assertTrue(exists.call_args[0][0].endswith('foo.py'))
+
+        stdout = sys.stdout.getvalue()
+        self.assertEqual('Could not find a config for foo\n', stdout)
+
+    def test_run_config_fetch_integration(self):
+        config = fetch.run_config_fetch('depot_tools', [])
+        url = 'https://chromium.googlesource.com/chromium/tools/depot_tools.git'
+        spec = {
+            'type': 'gclient_git',
+            'gclient_git_spec': {
+                'solutions': [{
+                    'url': url,
+                    'managed': False,
+                    'name': 'depot_tools',
+                    'deps_file': 'DEPS',
+                }],
+            }
         }
-    }
-    self.assertEqual((spec, 'depot_tools'), config)
+        self.assertEqual((spec, 'depot_tools'), config)
 
-  def test_checkout_factory(self):
-    with self.assertRaises(KeyError):
-      fetch.CheckoutFactory('invalid', {}, {}, "root")
+    def test_checkout_factory(self):
+        with self.assertRaises(KeyError):
+            fetch.CheckoutFactory('invalid', {}, {}, "root")
 
-    gclient = fetch.CheckoutFactory('gclient', {}, {}, "root")
-    self.assertTrue(isinstance(gclient, fetch.GclientCheckout))
+        gclient = fetch.CheckoutFactory('gclient', {}, {}, "root")
+        self.assertTrue(isinstance(gclient, fetch.GclientCheckout))
 
 
 class TestCheckout(unittest.TestCase):
-  def setUp(self):
-    mock.patch('sys.stdout', StringIO()).start()
-    self.addCleanup(mock.patch.stopall)
-
-    self.opts = argparse.Namespace(dry_run=False)
-    self.checkout = fetch.Checkout(self.opts, {}, '')
-
-  @contextlib.contextmanager
-  def _temporary_file(self):
-    """Creates a temporary file and removes it once it's out of scope"""
-    name = tempfile.mktemp()
-    try:
-      with open(name, 'w+') as f:
-        yield f
-    finally:
-      os.remove(name)
-
-  def test_run_dry(self):
-    self.opts.dry_run = True
-    self.checkout.run(['foo-not-found'])
-    self.assertEqual('Running: foo-not-found\n', sys.stdout.getvalue())
-
-  def test_run_non_existing_command(self):
-    with self.assertRaises(OSError):
-      self.checkout.run(['foo-not-found'])
-    self.assertEqual('Running: foo-not-found\n', sys.stdout.getvalue())
-
-  def test_run_non_existing_command_return_stdout(self):
-    with self.assertRaises(OSError):
-      self.checkout.run(['foo-not-found'], return_stdout=True)
-    self.assertEqual('Running: foo-not-found\n', sys.stdout.getvalue())
-
-  @mock.patch('sys.stderr', StringIO())
-  @mock.patch('sys.exit', side_effect=SystemExitMock)
-  def test_run_wrong_param(self, exit_mock):
-    # mocked version of sys.std* is not passed to subprocess, use temp files
-    with self._temporary_file() as f:
-      with self.assertRaises(subprocess.CalledProcessError):
-        self.checkout.run([sys.executable, '-invalid-param'],
-                          return_stdout=True,
-                          stderr=f)
-      f.seek(0)
-      # Expect some message to stderr
-      self.assertNotEqual('', f.read())
-    self.assertEqual('', sys.stderr.getvalue())
-
-    with self._temporary_file() as f:
-      with self.assertRaises(SystemExitMock):
-        self.checkout.run([sys.executable, '-invalid-param'], stderr=f)
-      f.seek(0)
-      # Expect some message to stderr
-      self.assertNotEqual('', f.read())
-    self.assertIn('Subprocess failed with return code', sys.stdout.getvalue())
-    exit_mock.assert_called_once()
-
-  def test_run_return_as_value(self):
-    cmd = [sys.executable, '-c', 'print("foo")']
-
-    response = self.checkout.run(cmd, return_stdout=True)
-    # we expect no response other than information about command
-    self.assertNotIn('foo', sys.stdout.getvalue().split('\n'))
-    # this file should be included in response
-    self.assertEqual('foo', response.strip())
-
-  def test_run_print_to_stdout(self):
-    cmd = [sys.executable, '-c', 'print("foo")']
-
-    # mocked version of sys.std* is not passed to subprocess, use temp files
-    with self._temporary_file() as stdout:
-      with self._temporary_file() as stderr:
-        response = self.checkout.run(cmd, stdout=stdout, stderr=stderr)
-        stdout.seek(0)
-        stderr.seek(0)
-        self.assertEqual('foo\n', stdout.read())
-        self.assertEqual('', stderr.read())
-
-    stdout = sys.stdout.getvalue()
-    self.assertEqual('', response)
+    def setUp(self):
+        mock.patch('sys.stdout', StringIO()).start()
+        self.addCleanup(mock.patch.stopall)
+
+        self.opts = argparse.Namespace(dry_run=False)
+        self.checkout = fetch.Checkout(self.opts, {}, '')
+
+    @contextlib.contextmanager
+    def _temporary_file(self):
+        """Creates a temporary file and removes it once it's out of scope"""
+        name = tempfile.mktemp()
+        try:
+            with open(name, 'w+') as f:
+                yield f
+        finally:
+            os.remove(name)
+
+    def test_run_dry(self):
+        self.opts.dry_run = True
+        self.checkout.run(['foo-not-found'])
+        self.assertEqual('Running: foo-not-found\n', sys.stdout.getvalue())
+
+    def test_run_non_existing_command(self):
+        with self.assertRaises(OSError):
+            self.checkout.run(['foo-not-found'])
+        self.assertEqual('Running: foo-not-found\n', sys.stdout.getvalue())
+
+    def test_run_non_existing_command_return_stdout(self):
+        with self.assertRaises(OSError):
+            self.checkout.run(['foo-not-found'], return_stdout=True)
+        self.assertEqual('Running: foo-not-found\n', sys.stdout.getvalue())
+
+    @mock.patch('sys.stderr', StringIO())
+    @mock.patch('sys.exit', side_effect=SystemExitMock)
+    def test_run_wrong_param(self, exit_mock):
+        # mocked version of sys.std* is not passed to subprocess, use temp files
+        with self._temporary_file() as f:
+            with self.assertRaises(subprocess.CalledProcessError):
+                self.checkout.run([sys.executable, '-invalid-param'],
+                                  return_stdout=True,
+                                  stderr=f)
+            f.seek(0)
+            # Expect some message to stderr
+            self.assertNotEqual('', f.read())
+        self.assertEqual('', sys.stderr.getvalue())
+
+        with self._temporary_file() as f:
+            with self.assertRaises(SystemExitMock):
+                self.checkout.run([sys.executable, '-invalid-param'], stderr=f)
+            f.seek(0)
+            # Expect some message to stderr
+            self.assertNotEqual('', f.read())
+        self.assertIn('Subprocess failed with return code',
+                      sys.stdout.getvalue())
+        exit_mock.assert_called_once()
+
+    def test_run_return_as_value(self):
+        cmd = [sys.executable, '-c', 'print("foo")']
+
+        response = self.checkout.run(cmd, return_stdout=True)
+        # we expect no response other than information about command
+        self.assertNotIn('foo', sys.stdout.getvalue().split('\n'))
+        # this file should be included in response
+        self.assertEqual('foo', response.strip())
+
+    def test_run_print_to_stdout(self):
+        cmd = [sys.executable, '-c', 'print("foo")']
+
+        # mocked version of sys.std* is not passed to subprocess, use temp files
+        with self._temporary_file() as stdout:
+            with self._temporary_file() as stderr:
+                response = self.checkout.run(cmd, stdout=stdout, stderr=stderr)
+                stdout.seek(0)
+                stderr.seek(0)
+                self.assertEqual('foo\n', stdout.read())
+                self.assertEqual('', stderr.read())
+
+        stdout = sys.stdout.getvalue()
+        self.assertEqual('', response)
 
 
 class TestGClientCheckout(unittest.TestCase):
-  def setUp(self):
-    self.run = mock.patch('fetch.Checkout.run').start()
+    def setUp(self):
+        self.run = mock.patch('fetch.Checkout.run').start()
 
-    self.opts = argparse.Namespace(dry_run=False)
-    self.checkout = fetch.GclientCheckout(self.opts, {}, '/root')
+        self.opts = argparse.Namespace(dry_run=False)
+        self.checkout = fetch.GclientCheckout(self.opts, {}, '/root')
 
-    self.addCleanup(mock.patch.stopall)
+        self.addCleanup(mock.patch.stopall)
 
-  @mock.patch('distutils.spawn.find_executable', return_value=True)
-  def test_run_gclient_executable_found(self, find_executable):
-    self.checkout.run_gclient('foo', 'bar', baz='qux')
-    find_executable.assert_called_once_with('gclient')
-    self.run.assert_called_once_with(('gclient', 'foo', 'bar'), baz='qux')
+    @mock.patch('distutils.spawn.find_executable', return_value=True)
+    def test_run_gclient_executable_found(self, find_executable):
+        self.checkout.run_gclient('foo', 'bar', baz='qux')
+        find_executable.assert_called_once_with('gclient')
+        self.run.assert_called_once_with(('gclient', 'foo', 'bar'), baz='qux')
 
-  @mock.patch('distutils.spawn.find_executable', return_value=False)
-  def test_run_gclient_executable_not_found(self, find_executable):
-    self.checkout.run_gclient('foo', 'bar', baz='qux')
-    find_executable.assert_called_once_with('gclient')
-    args = self.run.call_args[0][0]
-    kargs = self.run.call_args[1]
+    @mock.patch('distutils.spawn.find_executable', return_value=False)
+    def test_run_gclient_executable_not_found(self, find_executable):
+        self.checkout.run_gclient('foo', 'bar', baz='qux')
+        find_executable.assert_called_once_with('gclient')
+        args = self.run.call_args[0][0]
+        kargs = self.run.call_args[1]
 
-    self.assertEqual(4, len(args))
-    self.assertEqual(sys.executable, args[0])
-    self.assertTrue(args[1].endswith('gclient.py'))
-    self.assertEqual(('foo', 'bar'), args[2:])
-    self.assertEqual({'baz': 'qux'}, kargs)
+        self.assertEqual(4, len(args))
+        self.assertEqual(sys.executable, args[0])
+        self.assertTrue(args[1].endswith('gclient.py'))
+        self.assertEqual(('foo', 'bar'), args[2:])
+        self.assertEqual({'baz': 'qux'}, kargs)
 
 
 class TestGclientGitCheckout(unittest.TestCase):
-  def setUp(self):
-    self.run_gclient = mock.patch('fetch.GclientCheckout.run_gclient').start()
-    self.run_git = mock.patch('fetch.GitCheckout.run_git').start()
-
-    self.opts = argparse.Namespace(dry_run=False, nohooks=True, nohistory=False)
-    specs = {
-        'solutions': [{
-            'foo': 'bar',
-            'baz': 1
-        }, {
-            'foo': False
-        }],
-        'with_branch_heads': True,
-    }
-
-    self.checkout = fetch.GclientGitCheckout(self.opts, specs, '/root')
-
-    self.addCleanup(mock.patch.stopall)
-
-  def test_init(self):
-    self.checkout.init()
-    self.assertEqual(2, self.run_gclient.call_count)
-    self.assertEqual(3, self.run_git.call_count)
-
-    # Verify only expected commands and ignore arguments to avoid copying
-    # commands from fetch.py
-    self.assertEqual(['config', 'sync'],
-                     [a[0][0] for a in self.run_gclient.call_args_list])
-    self.assertEqual(['submodule', 'config', 'config'],
-                     [a[0][0] for a in self.run_git.call_args_list])
-
-    # First call to gclient, format spec is expected to be called so "foo" is
-    # expected to be present
-    args = self.run_gclient.call_args_list[0][0]
-    self.assertEqual('config', args[0])
-    self.assertIn('foo', args[2])
+    def setUp(self):
+        self.run_gclient = mock.patch(
+            'fetch.GclientCheckout.run_gclient').start()
+        self.run_git = mock.patch('fetch.GitCheckout.run_git').start()
+
+        self.opts = argparse.Namespace(dry_run=False,
+                                       nohooks=True,
+                                       nohistory=False)
+        specs = {
+            'solutions': [{
+                'foo': 'bar',
+                'baz': 1
+            }, {
+                'foo': False
+            }],
+            'with_branch_heads': True,
+        }
+
+        self.checkout = fetch.GclientGitCheckout(self.opts, specs, '/root')
+
+        self.addCleanup(mock.patch.stopall)
+
+    def test_init(self):
+        self.checkout.init()
+        self.assertEqual(2, self.run_gclient.call_count)
+        self.assertEqual(3, self.run_git.call_count)
+
+        # Verify only expected commands and ignore arguments to avoid copying
+        # commands from fetch.py
+        self.assertEqual(['config', 'sync'],
+                         [a[0][0] for a in self.run_gclient.call_args_list])
+        self.assertEqual(['submodule', 'config', 'config'],
+                         [a[0][0] for a in self.run_git.call_args_list])
+
+        # First call to gclient, format spec is expected to be called so "foo"
+        # is expected to be present
+        args = self.run_gclient.call_args_list[0][0]
+        self.assertEqual('config', args[0])
+        self.assertIn('foo', args[2])
 
 
 if __name__ == '__main__':
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  unittest.main()
+    logging.basicConfig(
+        level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
+    unittest.main()

+ 39 - 42
tests/fix_encoding_test.py

@@ -3,7 +3,6 @@
 # Copyright (c) 2011 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for fix_encoding.py."""
 
 from __future__ import print_function
@@ -18,47 +17,45 @@ import fix_encoding
 
 
 class FixEncodingTest(unittest.TestCase):
-  # Nice mix of latin, hebrew, arabic and chinese. Doesn't mean anything.
-  text = u'Héllô 偉大 سيد'
-
-  def test_code_page(self):
-    # Make sure printing garbage won't throw.
-    print(self.text.encode() + b'\xff')
-    print(self.text.encode() + b'\xff', file=sys.stderr)
-
-  def test_utf8(self):
-    # Make sure printing utf-8 works.
-    print(self.text.encode('utf-8'))
-    print(self.text.encode('utf-8'), file=sys.stderr)
-
-  @unittest.skipIf(os.name == 'nt', 'Does not work on Windows')
-  def test_unicode(self):
-    # Make sure printing unicode works.
-    print(self.text)
-    print(self.text, file=sys.stderr)
-
-  @unittest.skipIf(os.name == 'nt', 'Does not work on Windows')
-  def test_default_encoding(self):
-    self.assertEqual('utf-8', sys.getdefaultencoding())
-
-  def test_win_console(self):
-    if sys.platform != 'win32':
-      return
-    # This should fail if not redirected, e.g. run directly instead of through
-    # the presubmit check. Can be checked with:
-    # python tests\fix_encoding_test.py
-    self.assertEqual(
-        sys.stdout.__class__, fix_encoding.WinUnicodeOutput)
-    self.assertEqual(
-        sys.stderr.__class__, fix_encoding.WinUnicodeOutput)
-    self.assertEqual(sys.stdout.encoding, sys.getdefaultencoding())
-    self.assertEqual(sys.stderr.encoding, sys.getdefaultencoding())
-
-  def test_multiple_calls(self):
-    # Shouldn't do anything.
-    self.assertEqual(False, fix_encoding.fix_encoding())
+    # Nice mix of latin, hebrew, arabic and chinese. Doesn't mean anything.
+    text = u'Héllô 偉大 سيد'
+
+    def test_code_page(self):
+        # Make sure printing garbage won't throw.
+        print(self.text.encode() + b'\xff')
+        print(self.text.encode() + b'\xff', file=sys.stderr)
+
+    def test_utf8(self):
+        # Make sure printing utf-8 works.
+        print(self.text.encode('utf-8'))
+        print(self.text.encode('utf-8'), file=sys.stderr)
+
+    @unittest.skipIf(os.name == 'nt', 'Does not work on Windows')
+    def test_unicode(self):
+        # Make sure printing unicode works.
+        print(self.text)
+        print(self.text, file=sys.stderr)
+
+    @unittest.skipIf(os.name == 'nt', 'Does not work on Windows')
+    def test_default_encoding(self):
+        self.assertEqual('utf-8', sys.getdefaultencoding())
+
+    def test_win_console(self):
+        if sys.platform != 'win32':
+            return
+        # This should fail if not redirected, e.g. run directly instead of
+        # through the presubmit check. Can be checked with: python
+        # tests\fix_encoding_test.py
+        self.assertEqual(sys.stdout.__class__, fix_encoding.WinUnicodeOutput)
+        self.assertEqual(sys.stderr.__class__, fix_encoding.WinUnicodeOutput)
+        self.assertEqual(sys.stdout.encoding, sys.getdefaultencoding())
+        self.assertEqual(sys.stderr.encoding, sys.getdefaultencoding())
+
+    def test_multiple_calls(self):
+        # Shouldn't do anything.
+        self.assertEqual(False, fix_encoding.fix_encoding())
 
 
 if __name__ == '__main__':
-  fix_encoding.fix_encoding()
-  unittest.main()
+    fix_encoding.fix_encoding()
+    unittest.main()

+ 173 - 158
tests/gclient_cipd_smoketest.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2020 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Smoke tests for gclient.py.
 
 Shell out 'gclient' and run cipd tests.
@@ -21,169 +20,185 @@ CHROME_INFRA_URL = "https://chrome-infra-packages.appspot.com"
 
 
 class GClientSmokeCipd(gclient_smoketest_base.GClientSmokeBase):
-  def setUp(self):
-    super(GClientSmokeCipd, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    if not self.enabled:
-      self.skipTest('git fake repos not available')
-    self.env['PATH'] = (os.path.join(ROOT_DIR, 'testing_support')
-                        + os.pathsep + self.env['PATH'])
-
-  def testSyncCipd(self):
-    self.gclient(['config', self.git_base + 'repo_14', '--name', 'src'])
-    self.gclient(['sync'])
-
-    tree = self.mangle_git_tree(('repo_14@1', 'src'))
-    tree.update({
-        '_cipd': '\n'.join([
-            '$ParanoidMode CheckPresence',
-            '$OverrideInstallMode copy',
-            '',
-            '@Subdir src/another_cipd_dep',
-            'package1 1.1-cr0',
-            'package2 1.13',
-            '',
-            '@Subdir src/cipd_dep',
+    def setUp(self):
+        super(GClientSmokeCipd, self).setUp()
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        if not self.enabled:
+            self.skipTest('git fake repos not available')
+        self.env['PATH'] = (os.path.join(ROOT_DIR, 'testing_support') +
+                            os.pathsep + self.env['PATH'])
+
+    def testSyncCipd(self):
+        self.gclient(['config', self.git_base + 'repo_14', '--name', 'src'])
+        self.gclient(['sync'])
+
+        tree = self.mangle_git_tree(('repo_14@1', 'src'))
+        tree.update({
+            '_cipd':
+            '\n'.join([
+                '$ParanoidMode CheckPresence',
+                '$OverrideInstallMode copy',
+                '',
+                '@Subdir src/another_cipd_dep',
+                'package1 1.1-cr0',
+                'package2 1.13',
+                '',
+                '@Subdir src/cipd_dep',
+                'package0 0.1',
+                '',
+                '@Subdir src/cipd_dep_with_cipd_variable',
+                'package3/${platform} 1.2',
+                '',
+                '',
+            ]),
+            'src/another_cipd_dep/_cipd':
+            '\n'.join([
+                'package1 1.1-cr0',
+                'package2 1.13',
+            ]),
+            'src/cipd_dep/_cipd':
             'package0 0.1',
-            '',
-            '@Subdir src/cipd_dep_with_cipd_variable',
+            'src/cipd_dep_with_cipd_variable/_cipd':
             'package3/${platform} 1.2',
-            '',
-            '',
-        ]),
-        'src/another_cipd_dep/_cipd': '\n'.join([
-            'package1 1.1-cr0',
-            'package2 1.13',
-        ]),
-        'src/cipd_dep/_cipd': 'package0 0.1',
-        'src/cipd_dep_with_cipd_variable/_cipd': 'package3/${platform} 1.2',
-    })
-    self.assertTree(tree)
-
-  def testConvertGitToCipd(self):
-    self.gclient(['config', self.git_base + 'repo_13', '--name', 'src'])
-
-    # repo_13@1 has src/repo12 as a git dependency.
-    self.gclient(
-        ['sync', '-v', '-v', '-v', '--revision', self.githash('repo_13', 1)])
-
-    tree = self.mangle_git_tree(('repo_13@1', 'src'),
-                                ('repo_12@1', 'src/repo12'))
-    self.assertTree(tree)
-
-    # repo_13@3 has src/repo12 as a cipd dependency.
-    self.gclient(
-        ['sync', '-v', '-v', '-v', '--revision', self.githash('repo_13', 3),
-         '--delete_unversioned_trees'])
-
-    tree = self.mangle_git_tree(('repo_13@3', 'src'))
-    tree.update({
-        '_cipd': '\n'.join([
-            '$ParanoidMode CheckPresence',
-            '$OverrideInstallMode copy',
-            '',
-            '@Subdir src/repo12',
+        })
+        self.assertTree(tree)
+
+    def testConvertGitToCipd(self):
+        self.gclient(['config', self.git_base + 'repo_13', '--name', 'src'])
+
+        # repo_13@1 has src/repo12 as a git dependency.
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_13', 1)
+        ])
+
+        tree = self.mangle_git_tree(('repo_13@1', 'src'),
+                                    ('repo_12@1', 'src/repo12'))
+        self.assertTree(tree)
+
+        # repo_13@3 has src/repo12 as a cipd dependency.
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_13', 3), '--delete_unversioned_trees'
+        ])
+
+        tree = self.mangle_git_tree(('repo_13@3', 'src'))
+        tree.update({
+            '_cipd':
+            '\n'.join([
+                '$ParanoidMode CheckPresence',
+                '$OverrideInstallMode copy',
+                '',
+                '@Subdir src/repo12',
+                'foo 1.3',
+                '',
+                '',
+            ]),
+            'src/repo12/_cipd':
             'foo 1.3',
-            '',
-            '',
-        ]),
-        'src/repo12/_cipd': 'foo 1.3',
-    })
-    self.assertTree(tree)
-
-  def testConvertCipdToGit(self):
-    self.gclient(['config', self.git_base + 'repo_13', '--name', 'src'])
-
-    # repo_13@3 has src/repo12 as a cipd dependency.
-    self.gclient(
-        ['sync', '-v', '-v', '-v', '--revision', self.githash('repo_13', 3),
-         '--delete_unversioned_trees'])
-
-    tree = self.mangle_git_tree(('repo_13@3', 'src'))
-    tree.update({
-        '_cipd': '\n'.join([
-            '$ParanoidMode CheckPresence',
-            '$OverrideInstallMode copy',
-            '',
-            '@Subdir src/repo12',
+        })
+        self.assertTree(tree)
+
+    def testConvertCipdToGit(self):
+        self.gclient(['config', self.git_base + 'repo_13', '--name', 'src'])
+
+        # repo_13@3 has src/repo12 as a cipd dependency.
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_13', 3), '--delete_unversioned_trees'
+        ])
+
+        tree = self.mangle_git_tree(('repo_13@3', 'src'))
+        tree.update({
+            '_cipd':
+            '\n'.join([
+                '$ParanoidMode CheckPresence',
+                '$OverrideInstallMode copy',
+                '',
+                '@Subdir src/repo12',
+                'foo 1.3',
+                '',
+                '',
+            ]),
+            'src/repo12/_cipd':
             'foo 1.3',
-            '',
-            '',
-        ]),
-        'src/repo12/_cipd': 'foo 1.3',
-    })
-    self.assertTree(tree)
-
-    # repo_13@1 has src/repo12 as a git dependency.
-    self.gclient(
-        ['sync', '-v', '-v', '-v', '--revision', self.githash('repo_13', 1)])
-
-    tree = self.mangle_git_tree(('repo_13@1', 'src'),
-                                ('repo_12@1', 'src/repo12'))
-    tree.update({
-        '_cipd': '\n'.join([
-            '$ParanoidMode CheckPresence',
-            '$OverrideInstallMode copy',
-            '',
-            '@Subdir src/repo12',
+        })
+        self.assertTree(tree)
+
+        # repo_13@1 has src/repo12 as a git dependency.
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_13', 1)
+        ])
+
+        tree = self.mangle_git_tree(('repo_13@1', 'src'),
+                                    ('repo_12@1', 'src/repo12'))
+        tree.update({
+            '_cipd':
+            '\n'.join([
+                '$ParanoidMode CheckPresence',
+                '$OverrideInstallMode copy',
+                '',
+                '@Subdir src/repo12',
+                'foo 1.3',
+                '',
+                '',
+            ]),
+            'src/repo12/_cipd':
             'foo 1.3',
-            '',
-            '',
-        ]),
-        'src/repo12/_cipd': 'foo 1.3',
-    })
-    self.assertTree(tree)
-
-  def testRevInfo(self):
-    self.gclient(['config', self.git_base + 'repo_18', '--name', 'src'])
-    self.gclient(['sync'])
-    results = self.gclient(['revinfo'])
-    out = ('src: %(base)srepo_18\n'
-           'src/cipd_dep:package0: %(instance_url1)s\n'
-           'src/cipd_dep:package0/${platform}: %(instance_url2)s\n'
-           'src/cipd_dep:package1/another: %(instance_url3)s\n' % {
-               'base':
-               self.git_base,
-               'instance_url1':
-               CHROME_INFRA_URL + '/package0@package0-fake-tag:1.0',
-               'instance_url2':
-               CHROME_INFRA_URL +
-               '/package0/${platform}@package0/${platform}-fake-tag:1.0',
-               'instance_url3':
-               CHROME_INFRA_URL +
-               '/package1/another@package1/another-fake-tag:1.0',
-           })
-    self.check((out, '', 0), results)
-
-  def testRevInfoActual(self):
-    self.gclient(['config', self.git_base + 'repo_18', '--name', 'src'])
-    self.gclient(['sync'])
-    results = self.gclient(['revinfo', '--actual'])
-    out = (
-        'src: %(base)srepo_18@%(hash1)s\n'
-        'src/cipd_dep:package0: %(instance_url1)s\n'
-        'src/cipd_dep:package0/${platform}: %(instance_url2)s\n'
-        'src/cipd_dep:package1/another: %(instance_url3)s\n' % {
-            'base':
-            self.git_base,
-            'hash1':
-            self.githash('repo_18', 1),
-            'instance_url1':
-            # The below 'fake-*' and 'platform-expanded-*' ID's are from:
-            # ../testing_support/fake_cipd.py. 'fake-resolved' represents
-            # the package being found in the batch resolution mechanism.
-            CHROME_INFRA_URL + '/p/package0/+/package0-fake-resolved-id',
-            'instance_url2':
-            CHROME_INFRA_URL + '/p/package0/platform-expanded-test-only' +
-            '/+/package0/${platform}-fake-instance-id',
-            'instance_url3':
-            CHROME_INFRA_URL + '/p/package1/another' +
-            '/+/package1/another-fake-resolved-id',
         })
-    self.check((out, '', 0), results)
+        self.assertTree(tree)
+
+    def testRevInfo(self):
+        self.gclient(['config', self.git_base + 'repo_18', '--name', 'src'])
+        self.gclient(['sync'])
+        results = self.gclient(['revinfo'])
+        out = ('src: %(base)srepo_18\n'
+               'src/cipd_dep:package0: %(instance_url1)s\n'
+               'src/cipd_dep:package0/${platform}: %(instance_url2)s\n'
+               'src/cipd_dep:package1/another: %(instance_url3)s\n' % {
+                   'base':
+                   self.git_base,
+                   'instance_url1':
+                   CHROME_INFRA_URL + '/package0@package0-fake-tag:1.0',
+                   'instance_url2':
+                   CHROME_INFRA_URL +
+                   '/package0/${platform}@package0/${platform}-fake-tag:1.0',
+                   'instance_url3':
+                   CHROME_INFRA_URL +
+                   '/package1/another@package1/another-fake-tag:1.0',
+               })
+        self.check((out, '', 0), results)
+
+    def testRevInfoActual(self):
+        self.gclient(['config', self.git_base + 'repo_18', '--name', 'src'])
+        self.gclient(['sync'])
+        results = self.gclient(['revinfo', '--actual'])
+        out = (
+            'src: %(base)srepo_18@%(hash1)s\n'
+            'src/cipd_dep:package0: %(instance_url1)s\n'
+            'src/cipd_dep:package0/${platform}: %(instance_url2)s\n'
+            'src/cipd_dep:package1/another: %(instance_url3)s\n' % {
+                'base':
+                self.git_base,
+                'hash1':
+                self.githash('repo_18', 1),
+                'instance_url1':
+                # The below 'fake-*' and 'platform-expanded-*' ID's are from:
+                # ../testing_support/fake_cipd.py. 'fake-resolved' represents
+                # the package being found in the batch resolution mechanism.
+                CHROME_INFRA_URL + '/p/package0/+/package0-fake-resolved-id',
+                'instance_url2':
+                CHROME_INFRA_URL + '/p/package0/platform-expanded-test-only' +
+                '/+/package0/${platform}-fake-instance-id',
+                'instance_url3':
+                CHROME_INFRA_URL + '/p/package1/another' +
+                '/+/package1/another-fake-resolved-id',
+            })
+        self.check((out, '', 0), results)
 
 
 if __name__ == '__main__':
-  if '-v' in sys.argv:
-    logging.basicConfig(level=logging.DEBUG)
-  unittest.main()
+    if '-v' in sys.argv:
+        logging.basicConfig(level=logging.DEBUG)
+    unittest.main()

+ 1261 - 1105
tests/gclient_eval_unittest.py

@@ -22,1133 +22,1289 @@ import gclient
 import gclient_eval
 import gclient_utils
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class GClientEvalTest(unittest.TestCase):
-  def test_str(self):
-    self.assertEqual('foo', gclient_eval._gclient_eval('"foo"'))
-
-  def test_tuple(self):
-    self.assertEqual(('a', 'b'), gclient_eval._gclient_eval('("a", "b")'))
-
-  def test_list(self):
-    self.assertEqual(['a', 'b'], gclient_eval._gclient_eval('["a", "b"]'))
-
-  def test_dict(self):
-    self.assertEqual({'a': 'b'}, gclient_eval._gclient_eval('{"a": "b"}'))
-
-  def test_name_safe(self):
-    self.assertEqual(True, gclient_eval._gclient_eval('True'))
-
-  def test_name_unsafe(self):
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval._gclient_eval('UnsafeName')
-    self.assertIn('invalid name \'UnsafeName\'', str(cm.exception))
-
-  def test_invalid_call(self):
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval._gclient_eval('Foo("bar")')
-    self.assertIn('Str and Var are the only allowed functions',
-                  str(cm.exception))
-
-  def test_expands_vars(self):
-    self.assertEqual(
-        'foo',
-        gclient_eval._gclient_eval('Var("bar")', vars_dict={'bar': 'foo'}))
-    self.assertEqual(
-        'baz',
-        gclient_eval._gclient_eval(
-            'Var("bar")',
-            vars_dict={'bar': gclient_eval.ConstantString('baz')}))
-
-  def test_expands_vars_with_braces(self):
-    self.assertEqual(
-        'foo',
-        gclient_eval._gclient_eval('"{bar}"', vars_dict={'bar': 'foo'}))
-    self.assertEqual(
-        'baz',
-        gclient_eval._gclient_eval(
-            '"{bar}"',
-            vars_dict={'bar': gclient_eval.ConstantString('baz')}))
-
-  def test_invalid_var(self):
-    with self.assertRaises(KeyError) as cm:
-      gclient_eval._gclient_eval('"{bar}"', vars_dict={})
-    self.assertIn('bar was used as a variable, but was not declared',
-                  str(cm.exception))
-
-  def test_plus(self):
-    self.assertEqual('foo', gclient_eval._gclient_eval('"f" + "o" + "o"'))
-
-  def test_format(self):
-    self.assertEqual('foo', gclient_eval._gclient_eval('"%s" % "foo"'))
-
-  def test_not_expression(self):
-    with self.assertRaises(SyntaxError) as cm:
-      gclient_eval._gclient_eval('def foo():\n  pass')
-    self.assertIn('invalid syntax', str(cm.exception))
-
-  def test_not_whitelisted(self):
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval._gclient_eval('[x for x in [1, 2, 3]]')
-    self.assertIn(
-        'unexpected AST node: <_ast.ListComp object', str(cm.exception))
-
-  def test_dict_ordered(self):
-    for test_case in itertools.permutations(range(4)):
-      input_data = ['{'] + ['"%s": "%s",' % (n, n) for n in test_case] + ['}']
-      expected = [(str(n), str(n)) for n in test_case]
-      result = gclient_eval._gclient_eval(''.join(input_data))
-      self.assertEqual(expected, list(result.items()))
+    def test_str(self):
+        self.assertEqual('foo', gclient_eval._gclient_eval('"foo"'))
+
+    def test_tuple(self):
+        self.assertEqual(('a', 'b'), gclient_eval._gclient_eval('("a", "b")'))
+
+    def test_list(self):
+        self.assertEqual(['a', 'b'], gclient_eval._gclient_eval('["a", "b"]'))
+
+    def test_dict(self):
+        self.assertEqual({'a': 'b'}, gclient_eval._gclient_eval('{"a": "b"}'))
+
+    def test_name_safe(self):
+        self.assertEqual(True, gclient_eval._gclient_eval('True'))
+
+    def test_name_unsafe(self):
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval._gclient_eval('UnsafeName')
+        self.assertIn('invalid name \'UnsafeName\'', str(cm.exception))
+
+    def test_invalid_call(self):
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval._gclient_eval('Foo("bar")')
+        self.assertIn('Str and Var are the only allowed functions',
+                      str(cm.exception))
+
+    def test_expands_vars(self):
+        self.assertEqual(
+            'foo',
+            gclient_eval._gclient_eval('Var("bar")', vars_dict={'bar': 'foo'}))
+        self.assertEqual(
+            'baz',
+            gclient_eval._gclient_eval(
+                'Var("bar")',
+                vars_dict={'bar': gclient_eval.ConstantString('baz')}))
+
+    def test_expands_vars_with_braces(self):
+        self.assertEqual(
+            'foo',
+            gclient_eval._gclient_eval('"{bar}"', vars_dict={'bar': 'foo'}))
+        self.assertEqual(
+            'baz',
+            gclient_eval._gclient_eval(
+                '"{bar}"',
+                vars_dict={'bar': gclient_eval.ConstantString('baz')}))
+
+    def test_invalid_var(self):
+        with self.assertRaises(KeyError) as cm:
+            gclient_eval._gclient_eval('"{bar}"', vars_dict={})
+        self.assertIn('bar was used as a variable, but was not declared',
+                      str(cm.exception))
+
+    def test_plus(self):
+        self.assertEqual('foo', gclient_eval._gclient_eval('"f" + "o" + "o"'))
+
+    def test_format(self):
+        self.assertEqual('foo', gclient_eval._gclient_eval('"%s" % "foo"'))
+
+    def test_not_expression(self):
+        with self.assertRaises(SyntaxError) as cm:
+            gclient_eval._gclient_eval('def foo():\n  pass')
+        self.assertIn('invalid syntax', str(cm.exception))
+
+    def test_not_whitelisted(self):
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval._gclient_eval('[x for x in [1, 2, 3]]')
+        self.assertIn('unexpected AST node: <_ast.ListComp object',
+                      str(cm.exception))
+
+    def test_dict_ordered(self):
+        for test_case in itertools.permutations(range(4)):
+            input_data = ['{'] + ['"%s": "%s",' % (n, n)
+                                  for n in test_case] + ['}']
+            expected = [(str(n), str(n)) for n in test_case]
+            result = gclient_eval._gclient_eval(''.join(input_data))
+            self.assertEqual(expected, list(result.items()))
 
 
 class ExecTest(unittest.TestCase):
-  def test_multiple_assignment(self):
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval.Exec('a, b, c = "a", "b", "c"')
-    self.assertIn(
-        'invalid assignment: target should be a name', str(cm.exception))
-
-  def test_override(self):
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval.Exec('a = "a"\na = "x"')
-    self.assertIn(
-        'invalid assignment: overrides var \'a\'', str(cm.exception))
-
-  def test_schema_wrong_type(self):
-    with self.assertRaises(gclient_utils.Error):
-      gclient_eval.Exec('include_rules = {}')
-
-  def test_recursedeps_list(self):
-    local_scope = gclient_eval.Exec(
-        'recursedeps = [["src/third_party/angle", "DEPS.chromium"]]')
-    self.assertEqual(
-        {'recursedeps': [['src/third_party/angle', 'DEPS.chromium']]},
-        local_scope)
-
-  def test_var(self):
-    local_scope = gclient_eval.Exec('\n'.join([
-        'vars = {',
-        '  "foo": "bar",',
-        '  "baz": Str("quux")',
-        '}',
-        'deps = {',
-        '  "a_dep": "a" + Var("foo") + "b" + Var("baz"),',
-        '}',
-    ]))
-    Str = gclient_eval.ConstantString
-    self.assertEqual({
-        'vars': {'foo': 'bar', 'baz': Str('quux')},
-        'deps': {'a_dep': 'abarbquux'},
-    }, local_scope)
-
-  def test_braces_var(self):
-    local_scope = gclient_eval.Exec('\n'.join([
-        'vars = {',
-        '  "foo": "bar",',
-        '  "baz": Str("quux")',
-        '}',
-        'deps = {',
-        '  "a_dep": "a{foo}b{baz}",',
-        '}',
-    ]))
-    Str = gclient_eval.ConstantString
-    self.assertEqual({
-        'vars': {'foo': 'bar',
-                 'baz': Str('quux')},
-        'deps': {'a_dep': 'abarbquux'},
-    }, local_scope)
-
-  def test_empty_deps(self):
-    local_scope = gclient_eval.Exec('deps = {}')
-    self.assertEqual({'deps': {}}, local_scope)
-
-  def test_overrides_vars(self):
-    local_scope = gclient_eval.Exec('\n'.join([
-        'vars = {',
-        '  "foo": "bar",',
-        '  "quux": Str("quuz")',
-        '}',
-        'deps = {',
-        '  "a_dep": "a{foo}b",',
-        '  "b_dep": "c{quux}d",',
-        '}',
-    ]), vars_override={'foo': 'baz', 'quux': 'corge'})
-    Str = gclient_eval.ConstantString
-    self.assertEqual({
-        'vars': {'foo': 'bar', 'quux': Str('quuz')},
-        'deps': {'a_dep': 'abazb', 'b_dep': 'ccorged'},
-    }, local_scope)
-
-  def test_doesnt_override_undeclared_vars(self):
-    with self.assertRaises(KeyError) as cm:
-      gclient_eval.Exec('\n'.join([
-          'vars = {',
-          '  "foo": "bar",',
-          '}',
-          'deps = {',
-          '  "a_dep": "a{baz}b",',
-          '}',
-      ]), vars_override={'baz': 'lalala'})
-    self.assertIn('baz was used as a variable, but was not declared',
-                  str(cm.exception))
-
-  def test_doesnt_allow_duplicate_deps(self):
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval.Parse('\n'.join([
-        'deps = {',
-        '  "a_dep": {',
-        '    "url": "a_url@a_rev",',
-        '    "condition": "foo",',
-        '  },',
-        '  "a_dep": {',
-        '    "url": "a_url@another_rev",',
-        '    "condition": "not foo",',
-        '  }',
-        '}',
-      ]), '<unknown>')
-    self.assertIn('duplicate key in dictionary: a_dep', str(cm.exception))
+    def test_multiple_assignment(self):
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval.Exec('a, b, c = "a", "b", "c"')
+        self.assertIn('invalid assignment: target should be a name',
+                      str(cm.exception))
+
+    def test_override(self):
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval.Exec('a = "a"\na = "x"')
+        self.assertIn('invalid assignment: overrides var \'a\'',
+                      str(cm.exception))
+
+    def test_schema_wrong_type(self):
+        with self.assertRaises(gclient_utils.Error):
+            gclient_eval.Exec('include_rules = {}')
+
+    def test_recursedeps_list(self):
+        local_scope = gclient_eval.Exec(
+            'recursedeps = [["src/third_party/angle", "DEPS.chromium"]]')
+        self.assertEqual(
+            {'recursedeps': [['src/third_party/angle', 'DEPS.chromium']]},
+            local_scope)
+
+    def test_var(self):
+        local_scope = gclient_eval.Exec('\n'.join([
+            'vars = {',
+            '  "foo": "bar",',
+            '  "baz": Str("quux")',
+            '}',
+            'deps = {',
+            '  "a_dep": "a" + Var("foo") + "b" + Var("baz"),',
+            '}',
+        ]))
+        Str = gclient_eval.ConstantString
+        self.assertEqual(
+            {
+                'vars': {
+                    'foo': 'bar',
+                    'baz': Str('quux')
+                },
+                'deps': {
+                    'a_dep': 'abarbquux'
+                },
+            }, local_scope)
+
+    def test_braces_var(self):
+        local_scope = gclient_eval.Exec('\n'.join([
+            'vars = {',
+            '  "foo": "bar",',
+            '  "baz": Str("quux")',
+            '}',
+            'deps = {',
+            '  "a_dep": "a{foo}b{baz}",',
+            '}',
+        ]))
+        Str = gclient_eval.ConstantString
+        self.assertEqual(
+            {
+                'vars': {
+                    'foo': 'bar',
+                    'baz': Str('quux')
+                },
+                'deps': {
+                    'a_dep': 'abarbquux'
+                },
+            }, local_scope)
+
+    def test_empty_deps(self):
+        local_scope = gclient_eval.Exec('deps = {}')
+        self.assertEqual({'deps': {}}, local_scope)
+
+    def test_overrides_vars(self):
+        local_scope = gclient_eval.Exec('\n'.join([
+            'vars = {',
+            '  "foo": "bar",',
+            '  "quux": Str("quuz")',
+            '}',
+            'deps = {',
+            '  "a_dep": "a{foo}b",',
+            '  "b_dep": "c{quux}d",',
+            '}',
+        ]),
+                                        vars_override={
+                                            'foo': 'baz',
+                                            'quux': 'corge'
+                                        })
+        Str = gclient_eval.ConstantString
+        self.assertEqual(
+            {
+                'vars': {
+                    'foo': 'bar',
+                    'quux': Str('quuz')
+                },
+                'deps': {
+                    'a_dep': 'abazb',
+                    'b_dep': 'ccorged'
+                },
+            }, local_scope)
+
+    def test_doesnt_override_undeclared_vars(self):
+        with self.assertRaises(KeyError) as cm:
+            gclient_eval.Exec('\n'.join([
+                'vars = {',
+                '  "foo": "bar",',
+                '}',
+                'deps = {',
+                '  "a_dep": "a{baz}b",',
+                '}',
+            ]),
+                              vars_override={'baz': 'lalala'})
+        self.assertIn('baz was used as a variable, but was not declared',
+                      str(cm.exception))
+
+    def test_doesnt_allow_duplicate_deps(self):
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval.Parse(
+                '\n'.join([
+                    'deps = {',
+                    '  "a_dep": {',
+                    '    "url": "a_url@a_rev",',
+                    '    "condition": "foo",',
+                    '  },',
+                    '  "a_dep": {',
+                    '    "url": "a_url@another_rev",',
+                    '    "condition": "not foo",',
+                    '  }',
+                    '}',
+                ]), '<unknown>')
+        self.assertIn('duplicate key in dictionary: a_dep', str(cm.exception))
 
 
 class UpdateConditionTest(unittest.TestCase):
-  def test_both_present(self):
-    info = {'condition': 'foo'}
-    gclient_eval.UpdateCondition(info, 'and', 'bar')
-    self.assertEqual(info, {'condition': '(foo) and (bar)'})
-
-    info = {'condition': 'foo'}
-    gclient_eval.UpdateCondition(info, 'or', 'bar')
-    self.assertEqual(info, {'condition': '(foo) or (bar)'})
-
-  def test_one_present_and(self):
-    # If one of info's condition or new_condition is present, and |op| == 'and'
-    # then the the result must be the present condition.
-    info = {'condition': 'foo'}
-    gclient_eval.UpdateCondition(info, 'and', None)
-    self.assertEqual(info, {'condition': 'foo'})
-
-    info = {}
-    gclient_eval.UpdateCondition(info, 'and', 'bar')
-    self.assertEqual(info, {'condition': 'bar'})
-
-  def test_both_absent_and(self):
-    # Nothing happens
-    info = {}
-    gclient_eval.UpdateCondition(info, 'and', None)
-    self.assertEqual(info, {})
-
-  def test_or(self):
-    # If one of info's condition and new_condition is not present, then there
-    # shouldn't be a condition. An absent value is treated as implicitly True.
-    info = {'condition': 'foo'}
-    gclient_eval.UpdateCondition(info, 'or', None)
-    self.assertEqual(info, {})
-
-    info = {}
-    gclient_eval.UpdateCondition(info, 'or', 'bar')
-    self.assertEqual(info, {})
-
-    info = {}
-    gclient_eval.UpdateCondition(info, 'or', None)
-    self.assertEqual(info, {})
+    def test_both_present(self):
+        info = {'condition': 'foo'}
+        gclient_eval.UpdateCondition(info, 'and', 'bar')
+        self.assertEqual(info, {'condition': '(foo) and (bar)'})
+
+        info = {'condition': 'foo'}
+        gclient_eval.UpdateCondition(info, 'or', 'bar')
+        self.assertEqual(info, {'condition': '(foo) or (bar)'})
+
+    def test_one_present_and(self):
+        # If one of info's condition or new_condition is present, and |op| ==
+        # 'and' then the result must be the present condition.
+        info = {'condition': 'foo'}
+        gclient_eval.UpdateCondition(info, 'and', None)
+        self.assertEqual(info, {'condition': 'foo'})
+
+        info = {}
+        gclient_eval.UpdateCondition(info, 'and', 'bar')
+        self.assertEqual(info, {'condition': 'bar'})
+
+    def test_both_absent_and(self):
+        # Nothing happens
+        info = {}
+        gclient_eval.UpdateCondition(info, 'and', None)
+        self.assertEqual(info, {})
+
+    def test_or(self):
+        # If one of info's condition and new_condition is not present, then
+        # there shouldn't be a condition. An absent value is treated as
+        # implicitly True.
+        info = {'condition': 'foo'}
+        gclient_eval.UpdateCondition(info, 'or', None)
+        self.assertEqual(info, {})
+
+        info = {}
+        gclient_eval.UpdateCondition(info, 'or', 'bar')
+        self.assertEqual(info, {})
+
+        info = {}
+        gclient_eval.UpdateCondition(info, 'or', None)
+        self.assertEqual(info, {})
 
 
 class EvaluateConditionTest(unittest.TestCase):
-  def test_true(self):
-    self.assertTrue(gclient_eval.EvaluateCondition('True', {}))
-
-  def test_variable(self):
-    self.assertFalse(gclient_eval.EvaluateCondition('foo', {'foo': 'False'}))
-
-  def test_variable_cyclic_reference(self):
-    with self.assertRaises(ValueError) as cm:
-      self.assertTrue(gclient_eval.EvaluateCondition('bar', {'bar': 'bar'}))
-    self.assertIn(
-        'invalid cyclic reference to \'bar\' (inside \'bar\')',
-        str(cm.exception))
-
-  def test_operators(self):
-    self.assertFalse(gclient_eval.EvaluateCondition(
-        'a and not (b or c)', {'a': 'True', 'b': 'False', 'c': 'True'}))
-
-  def test_expansion(self):
-    self.assertTrue(gclient_eval.EvaluateCondition(
-        'a or b', {'a': 'b and c', 'b': 'not c', 'c': 'False'}))
-
-  def test_string_equality(self):
-    self.assertTrue(gclient_eval.EvaluateCondition(
-        'foo == "baz"', {'foo': '"baz"'}))
-    self.assertFalse(gclient_eval.EvaluateCondition(
-        'foo == "bar"', {'foo': '"baz"'}))
-
-  def test_string_inequality(self):
-    self.assertTrue(gclient_eval.EvaluateCondition(
-        'foo != "bar"', {'foo': '"baz"'}))
-    self.assertFalse(gclient_eval.EvaluateCondition(
-        'foo != "baz"', {'foo': '"baz"'}))
-
-  def test_triple_or(self):
-    self.assertTrue(gclient_eval.EvaluateCondition(
-        'a or b or c', {'a': 'False', 'b': 'False', 'c': 'True'}))
-    self.assertFalse(gclient_eval.EvaluateCondition(
-        'a or b or c', {'a': 'False', 'b': 'False', 'c': 'False'}))
-
-  def test_triple_and(self):
-    self.assertTrue(gclient_eval.EvaluateCondition(
-        'a and b and c', {'a': 'True', 'b': 'True', 'c': 'True'}))
-    self.assertFalse(gclient_eval.EvaluateCondition(
-        'a and b and c', {'a': 'True', 'b': 'True', 'c': 'False'}))
-
-  def test_triple_and_and_or(self):
-    self.assertTrue(gclient_eval.EvaluateCondition(
-        'a and b and c or d or e',
-        {'a': 'False', 'b': 'False', 'c': 'False', 'd': 'False', 'e': 'True'}))
-    self.assertFalse(gclient_eval.EvaluateCondition(
-        'a and b and c or d or e',
-        {'a': 'True', 'b': 'True', 'c': 'False', 'd': 'False', 'e': 'False'}))
-
-  def test_string_bool(self):
-    self.assertFalse(gclient_eval.EvaluateCondition(
-        'false_str_var and true_var',
-        {'false_str_var': 'False', 'true_var': True}))
-
-  def test_string_bool_typo(self):
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval.EvaluateCondition(
-          'false_var_str and true_var',
-          {'false_str_var': 'False', 'true_var': True})
-    self.assertIn(
-        'invalid "and" operand \'false_var_str\' '
-            '(inside \'false_var_str and true_var\')',
-        str(cm.exception))
-
-  def test_non_bool_in_or(self):
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval.EvaluateCondition(
-          'string_var or true_var',
-          {'string_var': 'Kittens', 'true_var': True})
-    self.assertIn(
-        'invalid "or" operand \'Kittens\' '
-            '(inside \'string_var or true_var\')',
-        str(cm.exception))
-
-  def test_non_bool_in_and(self):
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval.EvaluateCondition(
-          'string_var and true_var',
-          {'string_var': 'Kittens', 'true_var': True})
-    self.assertIn(
-        'invalid "and" operand \'Kittens\' '
-            '(inside \'string_var and true_var\')',
-        str(cm.exception))
-
-  def test_tuple_presence(self):
-    self.assertTrue(gclient_eval.EvaluateCondition(
-      'foo in ("bar", "baz")', {'foo': 'bar'}))
-    self.assertFalse(gclient_eval.EvaluateCondition(
-      'foo in ("bar", "baz")', {'foo': 'not_bar'}))
-
-  def test_unsupported_tuple_operation(self):
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval.EvaluateCondition('foo == ("bar", "baz")', {'foo': 'bar'})
-    self.assertIn('unexpected AST node', str(cm.exception))
-
-    with self.assertRaises(ValueError) as cm:
-      gclient_eval.EvaluateCondition('(foo,) == "bar"', {'foo': 'bar'})
-    self.assertIn('unexpected AST node', str(cm.exception))
-
-  def test_str_in_condition(self):
-    Str = gclient_eval.ConstantString
-    self.assertTrue(gclient_eval.EvaluateCondition(
-        's_var == "foo"',
-        {'s_var': Str("foo")}))
-
-    self.assertFalse(gclient_eval.EvaluateCondition(
-        's_var in ("baz", "quux")',
-        {'s_var': Str("foo")}))
+    def test_true(self):
+        self.assertTrue(gclient_eval.EvaluateCondition('True', {}))
+
+    def test_variable(self):
+        self.assertFalse(gclient_eval.EvaluateCondition('foo',
+                                                        {'foo': 'False'}))
+
+    def test_variable_cyclic_reference(self):
+        with self.assertRaises(ValueError) as cm:
+            self.assertTrue(
+                gclient_eval.EvaluateCondition('bar', {'bar': 'bar'}))
+        self.assertIn('invalid cyclic reference to \'bar\' (inside \'bar\')',
+                      str(cm.exception))
+
+    def test_operators(self):
+        self.assertFalse(
+            gclient_eval.EvaluateCondition('a and not (b or c)', {
+                'a': 'True',
+                'b': 'False',
+                'c': 'True'
+            }))
+
+    def test_expansion(self):
+        self.assertTrue(
+            gclient_eval.EvaluateCondition('a or b', {
+                'a': 'b and c',
+                'b': 'not c',
+                'c': 'False'
+            }))
+
+    def test_string_equality(self):
+        self.assertTrue(
+            gclient_eval.EvaluateCondition('foo == "baz"', {'foo': '"baz"'}))
+        self.assertFalse(
+            gclient_eval.EvaluateCondition('foo == "bar"', {'foo': '"baz"'}))
+
+    def test_string_inequality(self):
+        self.assertTrue(
+            gclient_eval.EvaluateCondition('foo != "bar"', {'foo': '"baz"'}))
+        self.assertFalse(
+            gclient_eval.EvaluateCondition('foo != "baz"', {'foo': '"baz"'}))
+
+    def test_triple_or(self):
+        self.assertTrue(
+            gclient_eval.EvaluateCondition('a or b or c', {
+                'a': 'False',
+                'b': 'False',
+                'c': 'True'
+            }))
+        self.assertFalse(
+            gclient_eval.EvaluateCondition('a or b or c', {
+                'a': 'False',
+                'b': 'False',
+                'c': 'False'
+            }))
+
+    def test_triple_and(self):
+        self.assertTrue(
+            gclient_eval.EvaluateCondition('a and b and c', {
+                'a': 'True',
+                'b': 'True',
+                'c': 'True'
+            }))
+        self.assertFalse(
+            gclient_eval.EvaluateCondition('a and b and c', {
+                'a': 'True',
+                'b': 'True',
+                'c': 'False'
+            }))
+
+    def test_triple_and_and_or(self):
+        self.assertTrue(
+            gclient_eval.EvaluateCondition('a and b and c or d or e', {
+                'a': 'False',
+                'b': 'False',
+                'c': 'False',
+                'd': 'False',
+                'e': 'True'
+            }))
+        self.assertFalse(
+            gclient_eval.EvaluateCondition('a and b and c or d or e', {
+                'a': 'True',
+                'b': 'True',
+                'c': 'False',
+                'd': 'False',
+                'e': 'False'
+            }))
+
+    def test_string_bool(self):
+        self.assertFalse(
+            gclient_eval.EvaluateCondition('false_str_var and true_var', {
+                'false_str_var': 'False',
+                'true_var': True
+            }))
+
+    def test_string_bool_typo(self):
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval.EvaluateCondition('false_var_str and true_var', {
+                'false_str_var': 'False',
+                'true_var': True
+            })
+        self.assertIn(
+            'invalid "and" operand \'false_var_str\' '
+            '(inside \'false_var_str and true_var\')', str(cm.exception))
+
+    def test_non_bool_in_or(self):
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval.EvaluateCondition('string_var or true_var', {
+                'string_var': 'Kittens',
+                'true_var': True
+            })
+        self.assertIn(
+            'invalid "or" operand \'Kittens\' '
+            '(inside \'string_var or true_var\')', str(cm.exception))
+
+    def test_non_bool_in_and(self):
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval.EvaluateCondition('string_var and true_var', {
+                'string_var': 'Kittens',
+                'true_var': True
+            })
+        self.assertIn(
+            'invalid "and" operand \'Kittens\' '
+            '(inside \'string_var and true_var\')', str(cm.exception))
+
+    def test_tuple_presence(self):
+        self.assertTrue(
+            gclient_eval.EvaluateCondition('foo in ("bar", "baz")',
+                                           {'foo': 'bar'}))
+        self.assertFalse(
+            gclient_eval.EvaluateCondition('foo in ("bar", "baz")',
+                                           {'foo': 'not_bar'}))
+
+    def test_unsupported_tuple_operation(self):
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval.EvaluateCondition('foo == ("bar", "baz")',
+                                           {'foo': 'bar'})
+        self.assertIn('unexpected AST node', str(cm.exception))
+
+        with self.assertRaises(ValueError) as cm:
+            gclient_eval.EvaluateCondition('(foo,) == "bar"', {'foo': 'bar'})
+        self.assertIn('unexpected AST node', str(cm.exception))
+
+    def test_str_in_condition(self):
+        Str = gclient_eval.ConstantString
+        self.assertTrue(
+            gclient_eval.EvaluateCondition('s_var == "foo"',
+                                           {'s_var': Str("foo")}))
+
+        self.assertFalse(
+            gclient_eval.EvaluateCondition('s_var in ("baz", "quux")',
+                                           {'s_var': Str("foo")}))
+
 
 class VarTest(unittest.TestCase):
-  def assert_adds_var(self, before, after):
-    local_scope = gclient_eval.Exec('\n'.join(before))
-    gclient_eval.AddVar(local_scope, 'baz', 'lemur')
-    results = gclient_eval.RenderDEPSFile(local_scope)
-    self.assertEqual(results, '\n'.join(after))
-
-  def test_adds_var(self):
-    before = [
-        'vars = {',
-        '  "foo": "bar",',
-        '}',
-    ]
-    after = [
-        'vars = {',
-        '  "baz": "lemur",',
-        '  "foo": "bar",',
-        '}',
-    ]
-    self.assert_adds_var(before, after)
-
-  def test_adds_var_twice(self):
-    local_scope = gclient_eval.Exec('\n'.join([
-        'vars = {',
-        '  "foo": "bar",',
-        '}',
-    ]))
-
-    gclient_eval.AddVar(local_scope, 'baz', 'lemur')
-    gclient_eval.AddVar(local_scope, 'v8_revision', 'deadbeef')
-    result = gclient_eval.RenderDEPSFile(local_scope)
-
-    self.assertEqual(result, '\n'.join([
-        'vars = {',
-        '  "v8_revision": "deadbeef",',
-        '  "baz": "lemur",',
-        '  "foo": "bar",',
-        '}',
-    ]))
-
-  def test_gets_and_sets_var(self):
-    local_scope = gclient_eval.Exec('\n'.join([
-        'vars = {',
-        '  "foo": "bar",',
-        '  "quux": Str("quuz")',
-        '}',
-    ]))
-
-    self.assertEqual(gclient_eval.GetVar(local_scope, 'foo'),
-                     "bar")
-    self.assertEqual(gclient_eval.GetVar(local_scope, 'quux'),
-                     "quuz")
-
-    gclient_eval.SetVar(local_scope, 'foo', 'baz')
-    gclient_eval.SetVar(local_scope, 'quux', 'corge')
-    result = gclient_eval.RenderDEPSFile(local_scope)
-
-    self.assertEqual(result, '\n'.join([
-        'vars = {',
-        '  "foo": "baz",',
-        '  "quux": Str("corge")',
-        '}',
-    ]))
-
-  def test_gets_and_sets_var_non_string(self):
-    local_scope = gclient_eval.Exec('\n'.join([
-        'vars = {',
-        '  "foo": True,',
-        '}',
-    ]))
-
-    result = gclient_eval.GetVar(local_scope, 'foo')
-    self.assertEqual(result, True)
-
-    gclient_eval.SetVar(local_scope, 'foo', 'False')
-    result = gclient_eval.RenderDEPSFile(local_scope)
-
-    self.assertEqual(result, '\n'.join([
-        'vars = {',
-        '  "foo": False,',
-        '}',
-    ]))
-
-
-  def test_add_preserves_formatting(self):
-    before = [
-        '# Copyright stuff',
-        '# some initial comments',
-        '',
-        'vars = { ',
-        '  # Some comments.',
-        '  "foo": "bar",',
-        '',
-        '  # More comments.',
-        '  # Even more comments.',
-        '  "v8_revision":   ',
-        '       "deadbeef",',
-        ' # Someone formatted this wrong',
-        '}',
-    ]
-    after = [
-        '# Copyright stuff',
-        '# some initial comments',
-        '',
-        'vars = { ',
-        '  "baz": "lemur",',
-        '  # Some comments.',
-        '  "foo": "bar",',
-        '',
-        '  # More comments.',
-        '  # Even more comments.',
-        '  "v8_revision":   ',
-        '       "deadbeef",',
-        ' # Someone formatted this wrong',
-        '}',
-    ]
-    self.assert_adds_var(before, after)
-
-  def test_set_preserves_formatting(self):
-    local_scope = gclient_eval.Exec('\n'.join([
-        'vars = {',
-        '   # Comment with trailing space ',
-        ' "foo": \'bar\',',
-        '}',
-    ]))
-
-    gclient_eval.SetVar(local_scope, 'foo', 'baz')
-    result = gclient_eval.RenderDEPSFile(local_scope)
-
-    self.assertEqual(result, '\n'.join([
-        'vars = {',
-        '   # Comment with trailing space ',
-        ' "foo": \'baz\',',
-        '}',
-    ]))
+    def assert_adds_var(self, before, after):
+        local_scope = gclient_eval.Exec('\n'.join(before))
+        gclient_eval.AddVar(local_scope, 'baz', 'lemur')
+        results = gclient_eval.RenderDEPSFile(local_scope)
+        self.assertEqual(results, '\n'.join(after))
+
+    def test_adds_var(self):
+        before = [
+            'vars = {',
+            '  "foo": "bar",',
+            '}',
+        ]
+        after = [
+            'vars = {',
+            '  "baz": "lemur",',
+            '  "foo": "bar",',
+            '}',
+        ]
+        self.assert_adds_var(before, after)
+
+    def test_adds_var_twice(self):
+        local_scope = gclient_eval.Exec('\n'.join([
+            'vars = {',
+            '  "foo": "bar",',
+            '}',
+        ]))
+
+        gclient_eval.AddVar(local_scope, 'baz', 'lemur')
+        gclient_eval.AddVar(local_scope, 'v8_revision', 'deadbeef')
+        result = gclient_eval.RenderDEPSFile(local_scope)
+
+        self.assertEqual(
+            result, '\n'.join([
+                'vars = {',
+                '  "v8_revision": "deadbeef",',
+                '  "baz": "lemur",',
+                '  "foo": "bar",',
+                '}',
+            ]))
+
+    def test_gets_and_sets_var(self):
+        local_scope = gclient_eval.Exec('\n'.join([
+            'vars = {',
+            '  "foo": "bar",',
+            '  "quux": Str("quuz")',
+            '}',
+        ]))
+
+        self.assertEqual(gclient_eval.GetVar(local_scope, 'foo'), "bar")
+        self.assertEqual(gclient_eval.GetVar(local_scope, 'quux'), "quuz")
+
+        gclient_eval.SetVar(local_scope, 'foo', 'baz')
+        gclient_eval.SetVar(local_scope, 'quux', 'corge')
+        result = gclient_eval.RenderDEPSFile(local_scope)
+
+        self.assertEqual(
+            result, '\n'.join([
+                'vars = {',
+                '  "foo": "baz",',
+                '  "quux": Str("corge")',
+                '}',
+            ]))
+
+    def test_gets_and_sets_var_non_string(self):
+        local_scope = gclient_eval.Exec('\n'.join([
+            'vars = {',
+            '  "foo": True,',
+            '}',
+        ]))
+
+        result = gclient_eval.GetVar(local_scope, 'foo')
+        self.assertEqual(result, True)
+
+        gclient_eval.SetVar(local_scope, 'foo', 'False')
+        result = gclient_eval.RenderDEPSFile(local_scope)
+
+        self.assertEqual(result, '\n'.join([
+            'vars = {',
+            '  "foo": False,',
+            '}',
+        ]))
+
+    def test_add_preserves_formatting(self):
+        before = [
+            '# Copyright stuff',
+            '# some initial comments',
+            '',
+            'vars = { ',
+            '  # Some comments.',
+            '  "foo": "bar",',
+            '',
+            '  # More comments.',
+            '  # Even more comments.',
+            '  "v8_revision":   ',
+            '       "deadbeef",',
+            ' # Someone formatted this wrong',
+            '}',
+        ]
+        after = [
+            '# Copyright stuff',
+            '# some initial comments',
+            '',
+            'vars = { ',
+            '  "baz": "lemur",',
+            '  # Some comments.',
+            '  "foo": "bar",',
+            '',
+            '  # More comments.',
+            '  # Even more comments.',
+            '  "v8_revision":   ',
+            '       "deadbeef",',
+            ' # Someone formatted this wrong',
+            '}',
+        ]
+        self.assert_adds_var(before, after)
+
+    def test_set_preserves_formatting(self):
+        local_scope = gclient_eval.Exec('\n'.join([
+            'vars = {',
+            '   # Comment with trailing space ',
+            ' "foo": \'bar\',',
+            '}',
+        ]))
+
+        gclient_eval.SetVar(local_scope, 'foo', 'baz')
+        result = gclient_eval.RenderDEPSFile(local_scope)
+
+        self.assertEqual(
+            result, '\n'.join([
+                'vars = {',
+                '   # Comment with trailing space ',
+                ' "foo": \'baz\',',
+                '}',
+            ]))
 
 
 class CipdTest(unittest.TestCase):
-  def test_gets_and_sets_cipd(self):
-    local_scope = gclient_eval.Exec('\n'.join([
-        'deps = {',
-        '    "src/cipd/package": {',
-        '        "packages": [',
-        '            {',
-        '                "package": "some/cipd/package",',
-        '                "version": "deadbeef",',
-        '            },',
-        '            {',
-        '                "package": "another/cipd/package",',
-        '                "version": "version:5678",',
-        '            },',
-        '        ],',
-        '        "condition": "checkout_android",',
-        '        "dep_type": "cipd",',
-        '    },',
-        '}',
-    ]))
-
-    self.assertEqual(
-        gclient_eval.GetCIPD(
-            local_scope, 'src/cipd/package', 'some/cipd/package'),
-        'deadbeef')
-
-    self.assertEqual(
-        gclient_eval.GetCIPD(
-            local_scope, 'src/cipd/package', 'another/cipd/package'),
-        'version:5678')
-
-    gclient_eval.SetCIPD(
-        local_scope, 'src/cipd/package', 'another/cipd/package', 'version:6789')
-    gclient_eval.SetCIPD(
-        local_scope, 'src/cipd/package', 'some/cipd/package', 'foobar')
-    result = gclient_eval.RenderDEPSFile(local_scope)
-
-    self.assertEqual(result, '\n'.join([
-        'deps = {',
-        '    "src/cipd/package": {',
-        '        "packages": [',
-        '            {',
-        '                "package": "some/cipd/package",',
-        '                "version": "foobar",',
-        '            },',
-        '            {',
-        '                "package": "another/cipd/package",',
-        '                "version": "version:6789",',
-        '            },',
-        '        ],',
-        '        "condition": "checkout_android",',
-        '        "dep_type": "cipd",',
-        '    },',
-        '}',
-    ]))
-
-  def test_gets_and_sets_cipd_vars(self):
-    local_scope = gclient_eval.Exec('\n'.join([
-        'vars = {',
-        '    "cipd-rev": "git_revision:deadbeef",',
-        '    "another-cipd-rev": "version:1.0.3",',
-        '}',
-        'deps = {',
-        '    "src/cipd/package": {',
-        '        "packages": [',
-        '            {',
-        '                "package": "some/cipd/package",',
-        '                "version": Var("cipd-rev"),',
-        '            },',
-        '            {',
-        '                "package": "another/cipd/package",',
-        '                "version": "{another-cipd-rev}",',
-        '            },',
-        '        ],',
-        '        "condition": "checkout_android",',
-        '        "dep_type": "cipd",',
-        '    },',
-        '}',
-    ]))
-
-    self.assertEqual(
-        gclient_eval.GetCIPD(
-            local_scope, 'src/cipd/package', 'some/cipd/package'),
-        'git_revision:deadbeef')
-
-    self.assertEqual(
-        gclient_eval.GetCIPD(
-            local_scope, 'src/cipd/package', 'another/cipd/package'),
-        'version:1.0.3')
-
-    gclient_eval.SetCIPD(
-        local_scope, 'src/cipd/package', 'another/cipd/package',
-        'version:1.1.0')
-    gclient_eval.SetCIPD(
-        local_scope, 'src/cipd/package', 'some/cipd/package',
-        'git_revision:foobar')
-    result = gclient_eval.RenderDEPSFile(local_scope)
-
-    self.assertEqual(result, '\n'.join([
-        'vars = {',
-        '    "cipd-rev": "git_revision:foobar",',
-        '    "another-cipd-rev": "version:1.1.0",',
-        '}',
-        'deps = {',
-        '    "src/cipd/package": {',
-        '        "packages": [',
-        '            {',
-        '                "package": "some/cipd/package",',
-        '                "version": Var("cipd-rev"),',
-        '            },',
-        '            {',
-        '                "package": "another/cipd/package",',
-        '                "version": "{another-cipd-rev}",',
-        '            },',
-        '        ],',
-        '        "condition": "checkout_android",',
-        '        "dep_type": "cipd",',
-        '    },',
-        '}',
-    ]))
-
-  def test_preserves_escaped_vars(self):
-    local_scope = gclient_eval.Exec('\n'.join([
-        'deps = {',
-        '    "src/cipd/package": {',
-        '        "packages": [',
-        '            {',
-        '                "package": "package/${{platform}}",',
-        '                "version": "version:abcd",',
-        '            },',
-        '        ],',
-        '        "dep_type": "cipd",',
-        '    },',
-        '}',
-    ]))
-
-    gclient_eval.SetCIPD(
-        local_scope, 'src/cipd/package', 'package/${platform}', 'version:dcba')
-    result = gclient_eval.RenderDEPSFile(local_scope)
-
-    self.assertEqual(result, '\n'.join([
-        'deps = {',
-        '    "src/cipd/package": {',
-        '        "packages": [',
-        '            {',
-        '                "package": "package/${{platform}}",',
-        '                "version": "version:dcba",',
-        '            },',
-        '        ],',
-        '        "dep_type": "cipd",',
-        '    },',
-        '}',
-    ]))
+    def test_gets_and_sets_cipd(self):
+        local_scope = gclient_eval.Exec('\n'.join([
+            'deps = {',
+            '    "src/cipd/package": {',
+            '        "packages": [',
+            '            {',
+            '                "package": "some/cipd/package",',
+            '                "version": "deadbeef",',
+            '            },',
+            '            {',
+            '                "package": "another/cipd/package",',
+            '                "version": "version:5678",',
+            '            },',
+            '        ],',
+            '        "condition": "checkout_android",',
+            '        "dep_type": "cipd",',
+            '    },',
+            '}',
+        ]))
+
+        self.assertEqual(
+            gclient_eval.GetCIPD(local_scope, 'src/cipd/package',
+                                 'some/cipd/package'), 'deadbeef')
+
+        self.assertEqual(
+            gclient_eval.GetCIPD(local_scope, 'src/cipd/package',
+                                 'another/cipd/package'), 'version:5678')
+
+        gclient_eval.SetCIPD(local_scope, 'src/cipd/package',
+                             'another/cipd/package', 'version:6789')
+        gclient_eval.SetCIPD(local_scope, 'src/cipd/package',
+                             'some/cipd/package', 'foobar')
+        result = gclient_eval.RenderDEPSFile(local_scope)
+
+        self.assertEqual(
+            result, '\n'.join([
+                'deps = {',
+                '    "src/cipd/package": {',
+                '        "packages": [',
+                '            {',
+                '                "package": "some/cipd/package",',
+                '                "version": "foobar",',
+                '            },',
+                '            {',
+                '                "package": "another/cipd/package",',
+                '                "version": "version:6789",',
+                '            },',
+                '        ],',
+                '        "condition": "checkout_android",',
+                '        "dep_type": "cipd",',
+                '    },',
+                '}',
+            ]))
+
+    def test_gets_and_sets_cipd_vars(self):
+        local_scope = gclient_eval.Exec('\n'.join([
+            'vars = {',
+            '    "cipd-rev": "git_revision:deadbeef",',
+            '    "another-cipd-rev": "version:1.0.3",',
+            '}',
+            'deps = {',
+            '    "src/cipd/package": {',
+            '        "packages": [',
+            '            {',
+            '                "package": "some/cipd/package",',
+            '                "version": Var("cipd-rev"),',
+            '            },',
+            '            {',
+            '                "package": "another/cipd/package",',
+            '                "version": "{another-cipd-rev}",',
+            '            },',
+            '        ],',
+            '        "condition": "checkout_android",',
+            '        "dep_type": "cipd",',
+            '    },',
+            '}',
+        ]))
+
+        self.assertEqual(
+            gclient_eval.GetCIPD(local_scope, 'src/cipd/package',
+                                 'some/cipd/package'), 'git_revision:deadbeef')
+
+        self.assertEqual(
+            gclient_eval.GetCIPD(local_scope, 'src/cipd/package',
+                                 'another/cipd/package'), 'version:1.0.3')
+
+        gclient_eval.SetCIPD(local_scope, 'src/cipd/package',
+                             'another/cipd/package', 'version:1.1.0')
+        gclient_eval.SetCIPD(local_scope, 'src/cipd/package',
+                             'some/cipd/package', 'git_revision:foobar')
+        result = gclient_eval.RenderDEPSFile(local_scope)
+
+        self.assertEqual(
+            result, '\n'.join([
+                'vars = {',
+                '    "cipd-rev": "git_revision:foobar",',
+                '    "another-cipd-rev": "version:1.1.0",',
+                '}',
+                'deps = {',
+                '    "src/cipd/package": {',
+                '        "packages": [',
+                '            {',
+                '                "package": "some/cipd/package",',
+                '                "version": Var("cipd-rev"),',
+                '            },',
+                '            {',
+                '                "package": "another/cipd/package",',
+                '                "version": "{another-cipd-rev}",',
+                '            },',
+                '        ],',
+                '        "condition": "checkout_android",',
+                '        "dep_type": "cipd",',
+                '    },',
+                '}',
+            ]))
+
+    def test_preserves_escaped_vars(self):
+        local_scope = gclient_eval.Exec('\n'.join([
+            'deps = {',
+            '    "src/cipd/package": {',
+            '        "packages": [',
+            '            {',
+            '                "package": "package/${{platform}}",',
+            '                "version": "version:abcd",',
+            '            },',
+            '        ],',
+            '        "dep_type": "cipd",',
+            '    },',
+            '}',
+        ]))
+
+        gclient_eval.SetCIPD(local_scope, 'src/cipd/package',
+                             'package/${platform}', 'version:dcba')
+        result = gclient_eval.RenderDEPSFile(local_scope)
+
+        self.assertEqual(
+            result, '\n'.join([
+                'deps = {',
+                '    "src/cipd/package": {',
+                '        "packages": [',
+                '            {',
+                '                "package": "package/${{platform}}",',
+                '                "version": "version:dcba",',
+                '            },',
+                '        ],',
+                '        "dep_type": "cipd",',
+                '    },',
+                '}',
+            ]))
 
 
 class RevisionTest(unittest.TestCase):
-  def assert_gets_and_sets_revision(self, before, after, rev_before='deadbeef'):
-    local_scope = gclient_eval.Exec('\n'.join(before))
-
-    result = gclient_eval.GetRevision(local_scope, 'src/dep')
-    self.assertEqual(result, rev_before)
-
-    gclient_eval.SetRevision(local_scope, 'src/dep', 'deadfeed')
-    self.assertEqual('\n'.join(after), gclient_eval.RenderDEPSFile(local_scope))
-
-  def test_revision(self):
-    before = [
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git@deadbeef",',
-        '}',
-    ]
-    after = [
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git@deadfeed",',
-        '}',
-    ]
-    self.assert_gets_and_sets_revision(before, after)
-
-  def test_revision_new_line(self):
-    before = [
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git@"',
-        '             + "deadbeef",',
-        '}',
-    ]
-    after = [
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git@"',
-        '             + "deadfeed",',
-        '}',
-    ]
-    self.assert_gets_and_sets_revision(before, after)
-
-  def test_revision_windows_local_path(self):
-    before = [
-        'deps = {',
-        '  "src/dep": "file:///C:\\\\path.git@deadbeef",',
-        '}',
-    ]
-    after = [
-        'deps = {',
-        '  "src/dep": "file:///C:\\\\path.git@deadfeed",',
-        '}',
-    ]
-    self.assert_gets_and_sets_revision(before, after)
-
-  def test_revision_multiline_strings(self):
-    deps = [
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git@"',
-        '             "deadbeef",',
-        '}',
-    ]
-    with self.assertRaises(ValueError) as e:
-      local_scope = gclient_eval.Exec('\n'.join(deps))
-      gclient_eval.SetRevision(local_scope, 'src/dep', 'deadfeed')
-    self.assertEqual(
-        'Can\'t update value for src/dep. Multiline strings and implicitly '
-        'concatenated strings are not supported.\n'
-        'Consider reformatting the DEPS file.',
-        str(e.exception))
-
-  def test_revision_implicitly_concatenated_strings(self):
-    deps = [
-        'deps = {',
-        '  "src/dep": "https://example.com" + "/dep.git@" "deadbeef",',
-        '}',
-    ]
-    with self.assertRaises(ValueError) as e:
-      local_scope = gclient_eval.Exec('\n'.join(deps))
-      gclient_eval.SetRevision(local_scope, 'src/dep', 'deadfeed')
-    self.assertEqual(
-        'Can\'t update value for src/dep. Multiline strings and implicitly '
-        'concatenated strings are not supported.\n'
-        'Consider reformatting the DEPS file.',
-        str(e.exception))
-
-  def test_revision_inside_dict(self):
-    before = [
-        'deps = {',
-        '  "src/dep": {',
-        '    "url": "https://example.com/dep.git@deadbeef",',
-        '    "condition": "some_condition",',
-        '  },',
-        '}',
-    ]
-    after = [
-        'deps = {',
-        '  "src/dep": {',
-        '    "url": "https://example.com/dep.git@deadfeed",',
-        '    "condition": "some_condition",',
-        '  },',
-        '}',
-    ]
-    self.assert_gets_and_sets_revision(before, after)
-
-  def test_follows_var_braces(self):
-    before = [
-        'vars = {',
-        '  "dep_revision": "deadbeef",',
-        '}',
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git@{dep_revision}",',
-        '}',
-    ]
-    after = [
-        'vars = {',
-        '  "dep_revision": "deadfeed",',
-        '}',
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git@{dep_revision}",',
-        '}',
-    ]
-    self.assert_gets_and_sets_revision(before, after)
-
-  def test_follows_var_braces_newline(self):
-    before = [
-        'vars = {',
-        '  "dep_revision": "deadbeef",',
-        '}',
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git"',
-        '             + "@{dep_revision}",',
-        '}',
-    ]
-    after = [
-        'vars = {',
-        '  "dep_revision": "deadfeed",',
-        '}',
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git"',
-        '             + "@{dep_revision}",',
-        '}',
-    ]
-    self.assert_gets_and_sets_revision(before, after)
-
-  def test_follows_var_function(self):
-    before = [
-        'vars = {',
-        '  "dep_revision": "deadbeef",',
-        '}',
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git@" + Var("dep_revision"),',
-        '}',
-    ]
-    after = [
-        'vars = {',
-        '  "dep_revision": "deadfeed",',
-        '}',
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git@" + Var("dep_revision"),',
-        '}',
-    ]
-    self.assert_gets_and_sets_revision(before, after)
-
-  def test_pins_revision(self):
-    before = [
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git",',
-        '}',
-    ]
-    after = [
-        'deps = {',
-        '  "src/dep": "https://example.com/dep.git@deadfeed",',
-        '}',
-    ]
-    self.assert_gets_and_sets_revision(before, after, rev_before=None)
-
-  def test_preserves_variables(self):
-    before = [
-        'vars = {',
-        '  "src_root": "src"',
-        '}',
-        'deps = {',
-        '  "{src_root}/dep": "https://example.com/dep.git@deadbeef",',
-        '}',
-    ]
-    after = [
-        'vars = {',
-        '  "src_root": "src"',
-        '}',
-        'deps = {',
-        '  "{src_root}/dep": "https://example.com/dep.git@deadfeed",',
-        '}',
-    ]
-    self.assert_gets_and_sets_revision(before, after)
-
-  def test_preserves_formatting(self):
-    before = [
-        'vars = {',
-        ' # Some comment on deadbeef ',
-        '  "dep_revision": "deadbeef",',
-        '}',
-        'deps = {',
-        '  "src/dep": {',
-        '    "url": "https://example.com/dep.git@" + Var("dep_revision"),',
-        '',
-        '    "condition": "some_condition",',
-        ' },',
-        '}',
-    ]
-    after = [
-        'vars = {',
-        ' # Some comment on deadbeef ',
-        '  "dep_revision": "deadfeed",',
-        '}',
-        'deps = {',
-        '  "src/dep": {',
-        '    "url": "https://example.com/dep.git@" + Var("dep_revision"),',
-        '',
-        '    "condition": "some_condition",',
-        ' },',
-        '}',
-    ]
-    self.assert_gets_and_sets_revision(before, after)
+    def assert_gets_and_sets_revision(self,
+                                      before,
+                                      after,
+                                      rev_before='deadbeef'):
+        local_scope = gclient_eval.Exec('\n'.join(before))
+
+        result = gclient_eval.GetRevision(local_scope, 'src/dep')
+        self.assertEqual(result, rev_before)
+
+        gclient_eval.SetRevision(local_scope, 'src/dep', 'deadfeed')
+        self.assertEqual('\n'.join(after),
+                         gclient_eval.RenderDEPSFile(local_scope))
+
+    def test_revision(self):
+        before = [
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git@deadbeef",',
+            '}',
+        ]
+        after = [
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git@deadfeed",',
+            '}',
+        ]
+        self.assert_gets_and_sets_revision(before, after)
+
+    def test_revision_new_line(self):
+        before = [
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git@"',
+            '             + "deadbeef",',
+            '}',
+        ]
+        after = [
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git@"',
+            '             + "deadfeed",',
+            '}',
+        ]
+        self.assert_gets_and_sets_revision(before, after)
+
+    def test_revision_windows_local_path(self):
+        before = [
+            'deps = {',
+            '  "src/dep": "file:///C:\\\\path.git@deadbeef",',
+            '}',
+        ]
+        after = [
+            'deps = {',
+            '  "src/dep": "file:///C:\\\\path.git@deadfeed",',
+            '}',
+        ]
+        self.assert_gets_and_sets_revision(before, after)
+
+    def test_revision_multiline_strings(self):
+        deps = [
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git@"',
+            '             "deadbeef",',
+            '}',
+        ]
+        with self.assertRaises(ValueError) as e:
+            local_scope = gclient_eval.Exec('\n'.join(deps))
+            gclient_eval.SetRevision(local_scope, 'src/dep', 'deadfeed')
+        self.assertEqual(
+            'Can\'t update value for src/dep. Multiline strings and implicitly '
+            'concatenated strings are not supported.\n'
+            'Consider reformatting the DEPS file.', str(e.exception))
+
+    def test_revision_implicitly_concatenated_strings(self):
+        deps = [
+            'deps = {',
+            '  "src/dep": "https://example.com" + "/dep.git@" "deadbeef",',
+            '}',
+        ]
+        with self.assertRaises(ValueError) as e:
+            local_scope = gclient_eval.Exec('\n'.join(deps))
+            gclient_eval.SetRevision(local_scope, 'src/dep', 'deadfeed')
+        self.assertEqual(
+            'Can\'t update value for src/dep. Multiline strings and implicitly '
+            'concatenated strings are not supported.\n'
+            'Consider reformatting the DEPS file.', str(e.exception))
+
+    def test_revision_inside_dict(self):
+        before = [
+            'deps = {',
+            '  "src/dep": {',
+            '    "url": "https://example.com/dep.git@deadbeef",',
+            '    "condition": "some_condition",',
+            '  },',
+            '}',
+        ]
+        after = [
+            'deps = {',
+            '  "src/dep": {',
+            '    "url": "https://example.com/dep.git@deadfeed",',
+            '    "condition": "some_condition",',
+            '  },',
+            '}',
+        ]
+        self.assert_gets_and_sets_revision(before, after)
+
+    def test_follows_var_braces(self):
+        before = [
+            'vars = {',
+            '  "dep_revision": "deadbeef",',
+            '}',
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git@{dep_revision}",',
+            '}',
+        ]
+        after = [
+            'vars = {',
+            '  "dep_revision": "deadfeed",',
+            '}',
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git@{dep_revision}",',
+            '}',
+        ]
+        self.assert_gets_and_sets_revision(before, after)
+
+    def test_follows_var_braces_newline(self):
+        before = [
+            'vars = {',
+            '  "dep_revision": "deadbeef",',
+            '}',
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git"',
+            '             + "@{dep_revision}",',
+            '}',
+        ]
+        after = [
+            'vars = {',
+            '  "dep_revision": "deadfeed",',
+            '}',
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git"',
+            '             + "@{dep_revision}",',
+            '}',
+        ]
+        self.assert_gets_and_sets_revision(before, after)
+
+    def test_follows_var_function(self):
+        before = [
+            'vars = {',
+            '  "dep_revision": "deadbeef",',
+            '}',
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git@" + Var("dep_revision"),',
+            '}',
+        ]
+        after = [
+            'vars = {',
+            '  "dep_revision": "deadfeed",',
+            '}',
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git@" + Var("dep_revision"),',
+            '}',
+        ]
+        self.assert_gets_and_sets_revision(before, after)
+
+    def test_pins_revision(self):
+        before = [
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git",',
+            '}',
+        ]
+        after = [
+            'deps = {',
+            '  "src/dep": "https://example.com/dep.git@deadfeed",',
+            '}',
+        ]
+        self.assert_gets_and_sets_revision(before, after, rev_before=None)
+
+    def test_preserves_variables(self):
+        before = [
+            'vars = {',
+            '  "src_root": "src"',
+            '}',
+            'deps = {',
+            '  "{src_root}/dep": "https://example.com/dep.git@deadbeef",',
+            '}',
+        ]
+        after = [
+            'vars = {',
+            '  "src_root": "src"',
+            '}',
+            'deps = {',
+            '  "{src_root}/dep": "https://example.com/dep.git@deadfeed",',
+            '}',
+        ]
+        self.assert_gets_and_sets_revision(before, after)
+
+    def test_preserves_formatting(self):
+        before = [
+            'vars = {',
+            ' # Some comment on deadbeef ',
+            '  "dep_revision": "deadbeef",',
+            '}',
+            'deps = {',
+            '  "src/dep": {',
+            '    "url": "https://example.com/dep.git@" + Var("dep_revision"),',
+            '',
+            '    "condition": "some_condition",',
+            ' },',
+            '}',
+        ]
+        after = [
+            'vars = {',
+            ' # Some comment on deadbeef ',
+            '  "dep_revision": "deadfeed",',
+            '}',
+            'deps = {',
+            '  "src/dep": {',
+            '    "url": "https://example.com/dep.git@" + Var("dep_revision"),',
+            '',
+            '    "condition": "some_condition",',
+            ' },',
+            '}',
+        ]
+        self.assert_gets_and_sets_revision(before, after)
 
 
 class ParseTest(unittest.TestCase):
-  def callParse(self, vars_override=None):
-    return gclient_eval.Parse('\n'.join([
-        'vars = {',
-        '  "foo": "bar",',
-        '}',
-        'deps = {',
-        '  "a_dep": "a{foo}b",',
-        '}',
-    ]), '<unknown>', vars_override)
-
-  def test_supports_vars_inside_vars(self):
-    deps_file = '\n'.join([
-        'vars = {',
-        '  "foo": "bar",',
-        '  "baz": "\\"{foo}\\" == \\"bar\\"",',
-        '}',
-        'deps = {',
-        '  "src/baz": {',
-        '    "url": "baz_url",',
-        '    "condition": "baz",',
-        '  },',
-        '}',
-    ])
-    local_scope = gclient_eval.Parse(deps_file, '<unknown>', None)
-    self.assertEqual({
-        'vars': {'foo': 'bar',
-                 'baz': '"bar" == "bar"'},
-        'deps': {'src/baz': {'url': 'baz_url',
-                             'dep_type': 'git',
-                             'condition': 'baz'}},
-    }, local_scope)
-
-  def test_has_builtin_vars(self):
-    builtin_vars = {'builtin_var': 'foo'}
-    deps_file = '\n'.join([
-      'deps = {',
-      '  "a_dep": "a{builtin_var}b",',
-      '}',
-    ])
-    local_scope = gclient_eval.Parse(deps_file, '<unknown>', None, builtin_vars)
-    self.assertEqual({
-      'deps': {'a_dep': {'url': 'afoob',
-                         'dep_type': 'git'}},
-    }, local_scope)
-
-  def test_declaring_builtin_var_has_no_effect(self):
-    builtin_vars = {'builtin_var': 'foo'}
-    deps_file = '\n'.join([
-        'vars = {',
-        '  "builtin_var": "bar",',
-        '}',
-        'deps = {',
-        '  "a_dep": "a{builtin_var}b",',
-        '}',
-    ])
-    local_scope = gclient_eval.Parse(deps_file, '<unknown>', None, builtin_vars)
-    self.assertEqual({
-      'vars': {'builtin_var': 'bar'},
-      'deps': {'a_dep': {'url': 'afoob',
-                         'dep_type': 'git'}},
-    }, local_scope)
-
-  def test_override_builtin_var(self):
-    builtin_vars = {'builtin_var': 'foo'}
-    vars_override = {'builtin_var': 'override'}
-    deps_file = '\n'.join([
-      'deps = {',
-      '  "a_dep": "a{builtin_var}b",',
-      '}',
-    ])
-    local_scope = gclient_eval.Parse(
-        deps_file, '<unknown>', vars_override, builtin_vars)
-    self.assertEqual({
-      'deps': {'a_dep': {'url': 'aoverrideb',
-                         'dep_type': 'git'}},
-    }, local_scope, str(local_scope))
-
-  def test_expands_vars(self):
-    local_scope = self.callParse()
-    self.assertEqual({
-        'vars': {'foo': 'bar'},
-        'deps': {'a_dep': {'url': 'abarb',
-                           'dep_type': 'git'}},
-    }, local_scope)
-
-  def test_overrides_vars(self):
-    local_scope = self.callParse(vars_override={'foo': 'baz'})
-    self.assertEqual({
-        'vars': {'foo': 'bar'},
-        'deps': {'a_dep': {'url': 'abazb',
-                           'dep_type': 'git'}},
-    }, local_scope)
-
-  def test_no_extra_vars(self):
-    deps_file = '\n'.join([
-        'vars = {',
-        '  "foo": "bar",',
-        '}',
-        'deps = {',
-        '  "a_dep": "a{baz}b",',
-        '}',
-    ])
-
-    with self.assertRaises(KeyError) as cm:
-      gclient_eval.Parse(deps_file, '<unknown>', {'baz': 'lalala'})
-    self.assertIn('baz was used as a variable, but was not declared',
-                  str(cm.exception))
-
-  def test_standardizes_deps_string_dep(self):
-    local_scope = gclient_eval.Parse('\n'.join([
-      'deps = {',
-      '  "a_dep": "a_url@a_rev",',
-      '}',
-    ]), '<unknown>')
-    self.assertEqual({
-        'deps': {'a_dep': {'url': 'a_url@a_rev',
-                           'dep_type': 'git'}},
-    }, local_scope)
-
-  def test_standardizes_deps_dict_dep(self):
-    local_scope = gclient_eval.Parse('\n'.join([
-      'deps = {',
-      '  "a_dep": {',
-      '     "url": "a_url@a_rev",',
-      '     "condition": "checkout_android",',
-      '  },',
-      '}',
-    ]), '<unknown>')
-    self.assertEqual({
-        'deps': {'a_dep': {'url': 'a_url@a_rev',
-                           'dep_type': 'git',
-                           'condition': 'checkout_android'}},
-    }, local_scope)
-
-  def test_ignores_none_in_deps_os(self):
-    local_scope = gclient_eval.Parse('\n'.join([
-      'deps = {',
-      '  "a_dep": "a_url@a_rev",',
-      '}',
-      'deps_os = {',
-      '  "mac": {',
-      '     "a_dep": None,',
-      '  },',
-      '}',
-    ]), '<unknown>')
-    self.assertEqual({
-        'deps': {'a_dep': {'url': 'a_url@a_rev',
-                           'dep_type': 'git'}},
-    }, local_scope)
-
-  def test_merges_deps_os_extra_dep(self):
-    local_scope = gclient_eval.Parse('\n'.join([
-      'deps = {',
-      '  "a_dep": "a_url@a_rev",',
-      '}',
-      'deps_os = {',
-      '  "mac": {',
-      '     "b_dep": "b_url@b_rev"',
-      '  },',
-      '}',
-    ]), '<unknown>')
-    self.assertEqual({
-        'deps': {'a_dep': {'url': 'a_url@a_rev',
-                           'dep_type': 'git'},
-                 'b_dep': {'url': 'b_url@b_rev',
-                           'dep_type': 'git',
-                           'condition': 'checkout_mac'}},
-    }, local_scope)
-
-  def test_merges_deps_os_existing_dep_with_no_condition(self):
-    local_scope = gclient_eval.Parse('\n'.join([
-      'deps = {',
-      '  "a_dep": "a_url@a_rev",',
-      '}',
-      'deps_os = {',
-      '  "mac": {',
-      '     "a_dep": "a_url@a_rev"',
-      '  },',
-      '}',
-    ]), '<unknown>')
-    self.assertEqual({
-        'deps': {'a_dep': {'url': 'a_url@a_rev',
-                           'dep_type': 'git'}},
-    }, local_scope)
-
-  def test_merges_deps_os_existing_dep_with_condition(self):
-    local_scope = gclient_eval.Parse('\n'.join([
-      'deps = {',
-      '  "a_dep": {',
-      '    "url": "a_url@a_rev",',
-      '    "condition": "some_condition",',
-      '  },',
-      '}',
-      'deps_os = {',
-      '  "mac": {',
-      '     "a_dep": "a_url@a_rev"',
-      '  },',
-      '}',
-    ]), '<unknown>')
-    self.assertEqual({
-        'deps': {
-            'a_dep': {'url': 'a_url@a_rev',
-                      'dep_type': 'git',
-                      'condition': '(checkout_mac) or (some_condition)'},
-        },
-    }, local_scope)
-
-  def test_merges_deps_os_multiple_os(self):
-    local_scope = gclient_eval.Parse('\n'.join([
-      'deps_os = {',
-      '  "win": {'
-      '     "a_dep": "a_url@a_rev"',
-      '  },',
-      '  "mac": {',
-      '     "a_dep": "a_url@a_rev"',
-      '  },',
-      '}',
-    ]), '<unknown>')
-    self.assertEqual({
-        'deps': {
-            'a_dep': {'url': 'a_url@a_rev',
-                      'dep_type': 'git',
-                      'condition': '(checkout_mac) or (checkout_win)'},
-        },
-    }, local_scope)
-
-  def test_fails_to_merge_same_dep_with_different_revisions(self):
-    with self.assertRaises(gclient_eval.gclient_utils.Error) as cm:
-      gclient_eval.Parse('\n'.join([
-        'deps = {',
-        '  "a_dep": {',
-        '    "url": "a_url@a_rev",',
-        '    "condition": "some_condition",',
-        '  },',
-        '}',
-        'deps_os = {',
-        '  "mac": {',
-        '     "a_dep": "a_url@b_rev"',
-        '  },',
-        '}',
-      ]), '<unknown>')
-    self.assertIn('conflicts with existing deps', str(cm.exception))
-
-  def test_merges_hooks_os(self):
-    local_scope = gclient_eval.Parse('\n'.join([
-      'hooks = [',
-      '  {',
-      '    "action": ["a", "action"],',
-      '  },',
-      ']',
-      'hooks_os = {',
-      '  "mac": [',
-      '    {',
-      '       "action": ["b", "action"]',
-      '    },',
-      '  ]',
-      '}',
-    ]), '<unknown>')
-    self.assertEqual({
-        "hooks": [{"action": ["a", "action"]},
-                  {"action": ["b", "action"], "condition": "checkout_mac"}],
-    }, local_scope)
+    def callParse(self, vars_override=None):
+        return gclient_eval.Parse(
+            '\n'.join([
+                'vars = {',
+                '  "foo": "bar",',
+                '}',
+                'deps = {',
+                '  "a_dep": "a{foo}b",',
+                '}',
+            ]), '<unknown>', vars_override)
+
+    def test_supports_vars_inside_vars(self):
+        deps_file = '\n'.join([
+            'vars = {',
+            '  "foo": "bar",',
+            '  "baz": "\\"{foo}\\" == \\"bar\\"",',
+            '}',
+            'deps = {',
+            '  "src/baz": {',
+            '    "url": "baz_url",',
+            '    "condition": "baz",',
+            '  },',
+            '}',
+        ])
+        local_scope = gclient_eval.Parse(deps_file, '<unknown>', None)
+        self.assertEqual(
+            {
+                'vars': {
+                    'foo': 'bar',
+                    'baz': '"bar" == "bar"'
+                },
+                'deps': {
+                    'src/baz': {
+                        'url': 'baz_url',
+                        'dep_type': 'git',
+                        'condition': 'baz'
+                    }
+                },
+            }, local_scope)
+
+    def test_has_builtin_vars(self):
+        builtin_vars = {'builtin_var': 'foo'}
+        deps_file = '\n'.join([
+            'deps = {',
+            '  "a_dep": "a{builtin_var}b",',
+            '}',
+        ])
+        local_scope = gclient_eval.Parse(deps_file, '<unknown>', None,
+                                         builtin_vars)
+        self.assertEqual(
+            {
+                'deps': {
+                    'a_dep': {
+                        'url': 'afoob',
+                        'dep_type': 'git'
+                    }
+                },
+            }, local_scope)
+
+    def test_declaring_builtin_var_has_no_effect(self):
+        builtin_vars = {'builtin_var': 'foo'}
+        deps_file = '\n'.join([
+            'vars = {',
+            '  "builtin_var": "bar",',
+            '}',
+            'deps = {',
+            '  "a_dep": "a{builtin_var}b",',
+            '}',
+        ])
+        local_scope = gclient_eval.Parse(deps_file, '<unknown>', None,
+                                         builtin_vars)
+        self.assertEqual(
+            {
+                'vars': {
+                    'builtin_var': 'bar'
+                },
+                'deps': {
+                    'a_dep': {
+                        'url': 'afoob',
+                        'dep_type': 'git'
+                    }
+                },
+            }, local_scope)
+
+    def test_override_builtin_var(self):
+        builtin_vars = {'builtin_var': 'foo'}
+        vars_override = {'builtin_var': 'override'}
+        deps_file = '\n'.join([
+            'deps = {',
+            '  "a_dep": "a{builtin_var}b",',
+            '}',
+        ])
+        local_scope = gclient_eval.Parse(deps_file, '<unknown>', vars_override,
+                                         builtin_vars)
+        self.assertEqual(
+            {
+                'deps': {
+                    'a_dep': {
+                        'url': 'aoverrideb',
+                        'dep_type': 'git'
+                    }
+                },
+            }, local_scope, str(local_scope))
+
+    def test_expands_vars(self):
+        local_scope = self.callParse()
+        self.assertEqual(
+            {
+                'vars': {
+                    'foo': 'bar'
+                },
+                'deps': {
+                    'a_dep': {
+                        'url': 'abarb',
+                        'dep_type': 'git'
+                    }
+                },
+            }, local_scope)
+
+    def test_overrides_vars(self):
+        local_scope = self.callParse(vars_override={'foo': 'baz'})
+        self.assertEqual(
+            {
+                'vars': {
+                    'foo': 'bar'
+                },
+                'deps': {
+                    'a_dep': {
+                        'url': 'abazb',
+                        'dep_type': 'git'
+                    }
+                },
+            }, local_scope)
+
+    def test_no_extra_vars(self):
+        deps_file = '\n'.join([
+            'vars = {',
+            '  "foo": "bar",',
+            '}',
+            'deps = {',
+            '  "a_dep": "a{baz}b",',
+            '}',
+        ])
+
+        with self.assertRaises(KeyError) as cm:
+            gclient_eval.Parse(deps_file, '<unknown>', {'baz': 'lalala'})
+        self.assertIn('baz was used as a variable, but was not declared',
+                      str(cm.exception))
+
+    def test_standardizes_deps_string_dep(self):
+        local_scope = gclient_eval.Parse(
+            '\n'.join([
+                'deps = {',
+                '  "a_dep": "a_url@a_rev",',
+                '}',
+            ]), '<unknown>')
+        self.assertEqual(
+            {
+                'deps': {
+                    'a_dep': {
+                        'url': 'a_url@a_rev',
+                        'dep_type': 'git'
+                    }
+                },
+            }, local_scope)
+
+    def test_standardizes_deps_dict_dep(self):
+        local_scope = gclient_eval.Parse(
+            '\n'.join([
+                'deps = {',
+                '  "a_dep": {',
+                '     "url": "a_url@a_rev",',
+                '     "condition": "checkout_android",',
+                '  },',
+                '}',
+            ]), '<unknown>')
+        self.assertEqual(
+            {
+                'deps': {
+                    'a_dep': {
+                        'url': 'a_url@a_rev',
+                        'dep_type': 'git',
+                        'condition': 'checkout_android'
+                    }
+                },
+            }, local_scope)
+
+    def test_ignores_none_in_deps_os(self):
+        local_scope = gclient_eval.Parse(
+            '\n'.join([
+                'deps = {',
+                '  "a_dep": "a_url@a_rev",',
+                '}',
+                'deps_os = {',
+                '  "mac": {',
+                '     "a_dep": None,',
+                '  },',
+                '}',
+            ]), '<unknown>')
+        self.assertEqual(
+            {
+                'deps': {
+                    'a_dep': {
+                        'url': 'a_url@a_rev',
+                        'dep_type': 'git'
+                    }
+                },
+            }, local_scope)
+
+    def test_merges_deps_os_extra_dep(self):
+        local_scope = gclient_eval.Parse(
+            '\n'.join([
+                'deps = {',
+                '  "a_dep": "a_url@a_rev",',
+                '}',
+                'deps_os = {',
+                '  "mac": {',
+                '     "b_dep": "b_url@b_rev"',
+                '  },',
+                '}',
+            ]), '<unknown>')
+        self.assertEqual(
+            {
+                'deps': {
+                    'a_dep': {
+                        'url': 'a_url@a_rev',
+                        'dep_type': 'git'
+                    },
+                    'b_dep': {
+                        'url': 'b_url@b_rev',
+                        'dep_type': 'git',
+                        'condition': 'checkout_mac'
+                    }
+                },
+            }, local_scope)
+
+    def test_merges_deps_os_existing_dep_with_no_condition(self):
+        local_scope = gclient_eval.Parse(
+            '\n'.join([
+                'deps = {',
+                '  "a_dep": "a_url@a_rev",',
+                '}',
+                'deps_os = {',
+                '  "mac": {',
+                '     "a_dep": "a_url@a_rev"',
+                '  },',
+                '}',
+            ]), '<unknown>')
+        self.assertEqual(
+            {
+                'deps': {
+                    'a_dep': {
+                        'url': 'a_url@a_rev',
+                        'dep_type': 'git'
+                    }
+                },
+            }, local_scope)
+
+    def test_merges_deps_os_existing_dep_with_condition(self):
+        local_scope = gclient_eval.Parse(
+            '\n'.join([
+                'deps = {',
+                '  "a_dep": {',
+                '    "url": "a_url@a_rev",',
+                '    "condition": "some_condition",',
+                '  },',
+                '}',
+                'deps_os = {',
+                '  "mac": {',
+                '     "a_dep": "a_url@a_rev"',
+                '  },',
+                '}',
+            ]), '<unknown>')
+        self.assertEqual(
+            {
+                'deps': {
+                    'a_dep': {
+                        'url': 'a_url@a_rev',
+                        'dep_type': 'git',
+                        'condition': '(checkout_mac) or (some_condition)'
+                    },
+                },
+            }, local_scope)
+
+    def test_merges_deps_os_multiple_os(self):
+        local_scope = gclient_eval.Parse(
+            '\n'.join([
+                'deps_os = {',
+                '  "win": {'
+                '     "a_dep": "a_url@a_rev"',
+                '  },',
+                '  "mac": {',
+                '     "a_dep": "a_url@a_rev"',
+                '  },',
+                '}',
+            ]), '<unknown>')
+        self.assertEqual(
+            {
+                'deps': {
+                    'a_dep': {
+                        'url': 'a_url@a_rev',
+                        'dep_type': 'git',
+                        'condition': '(checkout_mac) or (checkout_win)'
+                    },
+                },
+            }, local_scope)
+
+    def test_fails_to_merge_same_dep_with_different_revisions(self):
+        with self.assertRaises(gclient_eval.gclient_utils.Error) as cm:
+            gclient_eval.Parse(
+                '\n'.join([
+                    'deps = {',
+                    '  "a_dep": {',
+                    '    "url": "a_url@a_rev",',
+                    '    "condition": "some_condition",',
+                    '  },',
+                    '}',
+                    'deps_os = {',
+                    '  "mac": {',
+                    '     "a_dep": "a_url@b_rev"',
+                    '  },',
+                    '}',
+                ]), '<unknown>')
+        self.assertIn('conflicts with existing deps', str(cm.exception))
+
+    def test_merges_hooks_os(self):
+        local_scope = gclient_eval.Parse(
+            '\n'.join([
+                'hooks = [',
+                '  {',
+                '    "action": ["a", "action"],',
+                '  },',
+                ']',
+                'hooks_os = {',
+                '  "mac": [',
+                '    {',
+                '       "action": ["b", "action"]',
+                '    },',
+                '  ]',
+                '}',
+            ]), '<unknown>')
+        self.assertEqual(
+            {
+                "hooks": [{
+                    "action": ["a", "action"]
+                }, {
+                    "action": ["b", "action"],
+                    "condition": "checkout_mac"
+                }],
+            }, local_scope)
 
 
 if __name__ == '__main__':
-  level = logging.DEBUG if '-v' in sys.argv else logging.FATAL
-  logging.basicConfig(
-      level=level,
-      format='%(asctime).19s %(levelname)s %(filename)s:'
-             '%(lineno)s %(message)s')
-  unittest.main()
+    level = logging.DEBUG if '-v' in sys.argv else logging.FATAL
+    logging.basicConfig(level=level,
+                        format='%(asctime).19s %(levelname)s %(filename)s:'
+                        '%(lineno)s %(message)s')
+    unittest.main()

+ 136 - 125
tests/gclient_git_mutates_smoketest.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2021 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Smoke tests for gclient.py.
 
 Shell out 'gclient' and run git tests.
@@ -23,130 +22,142 @@ from testing_support.fake_repos import join, write
 
 
 class GClientSmokeGITMutates(gclient_smoketest_base.GClientSmokeBase):
-  """testRevertAndStatus mutates the git repo so move it to its own suite."""
-  def setUp(self):
-    super(GClientSmokeGITMutates, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    if not self.enabled:
-      self.skipTest('git fake repos not available')
-
-  # TODO(crbug.com/1024683): Enable for windows.
-  @unittest.skipIf(sys.platform == 'win32', 'not yet fixed on win')
-  def testRevertAndStatus(self):
-    # Commit new change to repo to make repo_2's hash use a custom_var.
-    cur_deps = self.FAKE_REPOS.git_hashes['repo_1'][-1][1]['DEPS']
-    repo_2_hash = self.FAKE_REPOS.git_hashes['repo_2'][1][0][:7]
-    new_deps = cur_deps.replace('repo_2@%s\'' % repo_2_hash,
-                                'repo_2@\' + Var(\'r2hash\')')
-    new_deps = 'vars = {\'r2hash\': \'%s\'}\n%s' % (repo_2_hash, new_deps)
-    self.FAKE_REPOS._commit_git('repo_1', {  # pylint: disable=protected-access
-      'DEPS': new_deps,
-      'origin': 'git/repo_1@3\n',
-    })
-
-    config_template = ''.join([
-        'solutions = [{'
-        '  "name"        : "src",'
-        '  "url"         : %(git_base)r + "repo_1",'
-        '  "deps_file"   : "DEPS",'
-        '  "managed"     : True,'
-        '  "custom_vars" : %(custom_vars)s,'
-        '}]'])
-
-    self.gclient(['config', '--spec', config_template % {
-      'git_base': self.git_base,
-      'custom_vars': {}
-    }])
-
-    # Tested in testSync.
-    self.gclient(['sync', '--deps', 'mac'])
-    write(join(self.root_dir, 'src', 'repo2', 'hi'), 'Hey!')
-
-    out = self.parseGclient(['status', '--deps', 'mac', '--jobs', '1'], [])
-    # TODO(maruel): http://crosbug.com/3584 It should output the unversioned
-    # files.
-    self.assertEqual(0, len(out))
-
-    # Revert implies --force implies running hooks without looking at pattern
-    # matching. For each expected path, 'git reset' and 'git clean' are run, so
-    # there should be two results for each. The last two results should reflect
-    # writing git_hooked1 and git_hooked2. There's only one result for the third
-    # because it is clean and has no output for 'git clean'.
-    out = self.parseGclient(['revert', '--deps', 'mac', '--jobs', '1'],
-                            ['running', 'running'])
-    self.assertEqual(2, len(out))
-    tree = self.mangle_git_tree(('repo_1@3', 'src'),
-                                ('repo_2@1', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    self.assertTree(tree)
-
-    # Make a new commit object in the origin repo, to force reset to fetch.
-    self.FAKE_REPOS._commit_git('repo_2', {  # pylint: disable=protected-access
-      'origin': 'git/repo_2@3\n',
-    })
-
-    self.gclient(['config', '--spec', config_template % {
-      'git_base': self.git_base,
-      'custom_vars': {'r2hash': self.FAKE_REPOS.git_hashes['repo_2'][-1][0] }
-    }])
-    out = self.parseGclient(['revert', '--deps', 'mac', '--jobs', '1'],
-                            ['running', 'running'])
-    self.assertEqual(2, len(out))
-    tree = self.mangle_git_tree(('repo_1@3', 'src'),
-                                ('repo_2@3', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    self.assertTree(tree)
-
-    results = self.gclient(['status', '--deps', 'mac', '--jobs', '1'])
-    out = results[0].splitlines(False)
-    # TODO(maruel): http://crosbug.com/3584 It should output the unversioned
-    # files.
-    self.assertEqual(0, len(out))
-
-  def testSyncNoHistory(self):
-    # Create an extra commit in repo_2 and point DEPS to its hash.
-    cur_deps = self.FAKE_REPOS.git_hashes['repo_1'][-1][1]['DEPS']
-    repo_2_hash_old = self.FAKE_REPOS.git_hashes['repo_2'][1][0][:7]
-    self.FAKE_REPOS._commit_git('repo_2', {  # pylint: disable=protected-access
-      'last_file': 'file created in last commit',
-    })
-    repo_2_hash_new = self.FAKE_REPOS.git_hashes['repo_2'][-1][0]
-    new_deps = cur_deps.replace(repo_2_hash_old, repo_2_hash_new)
-    self.assertNotEqual(new_deps, cur_deps)
-    self.FAKE_REPOS._commit_git('repo_1', {  # pylint: disable=protected-access
-      'DEPS': new_deps,
-      'origin': 'git/repo_1@4\n',
-    })
-
-    config_template = ''.join([
-        'solutions = [{'
-        '  "name"        : "src",'
-        '  "url"         : %(git_base)r + "repo_1",'
-        '  "deps_file"   : "DEPS",'
-        '  "managed"     : True,'
-        '}]'])
-
-    self.gclient(['config', '--spec', config_template % {
-      'git_base': self.git_base
-    }])
-
-    self.gclient(['sync', '--no-history', '--deps', 'mac'])
-    repo2_root = join(self.root_dir, 'src', 'repo2')
-
-    # Check that repo_2 is actually shallow and its log has only one entry.
-    rev_lists = subprocess2.check_output(['git', 'rev-list', 'HEAD'],
-                                         cwd=repo2_root).decode('utf-8')
-    self.assertEqual(repo_2_hash_new, rev_lists.strip('\r\n'))
-
-    # Check that we have actually checked out the right commit.
-    self.assertTrue(os.path.exists(join(repo2_root, 'last_file')))
+    """testRevertAndStatus mutates the git repo so move it to its own suite."""
+    def setUp(self):
+        super(GClientSmokeGITMutates, self).setUp()
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        if not self.enabled:
+            self.skipTest('git fake repos not available')
+
+    # TODO(crbug.com/1024683): Enable for windows.
+    @unittest.skipIf(sys.platform == 'win32', 'not yet fixed on win')
+    def testRevertAndStatus(self):
+        # Commit new change to repo to make repo_2's hash use a custom_var.
+        cur_deps = self.FAKE_REPOS.git_hashes['repo_1'][-1][1]['DEPS']
+        repo_2_hash = self.FAKE_REPOS.git_hashes['repo_2'][1][0][:7]
+        new_deps = cur_deps.replace('repo_2@%s\'' % repo_2_hash,
+                                    'repo_2@\' + Var(\'r2hash\')')
+        new_deps = 'vars = {\'r2hash\': \'%s\'}\n%s' % (repo_2_hash, new_deps)
+        self.FAKE_REPOS._commit_git('repo_1', {  # pylint: disable=protected-access
+          'DEPS': new_deps,
+          'origin': 'git/repo_1@3\n',
+        })
+
+        config_template = ''.join([
+            'solutions = [{'
+            '  "name"        : "src",'
+            '  "url"         : %(git_base)r + "repo_1",'
+            '  "deps_file"   : "DEPS",'
+            '  "managed"     : True,'
+            '  "custom_vars" : %(custom_vars)s,'
+            '}]'
+        ])
+
+        self.gclient([
+            'config', '--spec', config_template % {
+                'git_base': self.git_base,
+                'custom_vars': {}
+            }
+        ])
+
+        # Tested in testSync.
+        self.gclient(['sync', '--deps', 'mac'])
+        write(join(self.root_dir, 'src', 'repo2', 'hi'), 'Hey!')
+
+        out = self.parseGclient(['status', '--deps', 'mac', '--jobs', '1'], [])
+        # TODO(maruel): http://crosbug.com/3584 It should output the unversioned
+        # files.
+        self.assertEqual(0, len(out))
+
+        # Revert implies --force implies running hooks without looking at
+        # pattern matching. For each expected path, 'git reset' and 'git clean'
+        # are run, so there should be two results for each. The last two results
+        # should reflect writing git_hooked1 and git_hooked2. There's only one
+        # result for the third because it is clean and has no output for 'git
+        # clean'.
+        out = self.parseGclient(['revert', '--deps', 'mac', '--jobs', '1'],
+                                ['running', 'running'])
+        self.assertEqual(2, len(out))
+        tree = self.mangle_git_tree(('repo_1@3', 'src'),
+                                    ('repo_2@1', 'src/repo2'),
+                                    ('repo_3@2', 'src/repo2/repo_renamed'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        self.assertTree(tree)
+
+        # Make a new commit object in the origin repo, to force reset to fetch.
+        self.FAKE_REPOS._commit_git(
+            'repo_2',
+            {  # pylint: disable=protected-access
+                'origin': 'git/repo_2@3\n',
+            })
+
+        self.gclient([
+            'config', '--spec', config_template % {
+                'git_base': self.git_base,
+                'custom_vars': {
+                    'r2hash': self.FAKE_REPOS.git_hashes['repo_2'][-1][0]
+                }
+            }
+        ])
+        out = self.parseGclient(['revert', '--deps', 'mac', '--jobs', '1'],
+                                ['running', 'running'])
+        self.assertEqual(2, len(out))
+        tree = self.mangle_git_tree(('repo_1@3', 'src'),
+                                    ('repo_2@3', 'src/repo2'),
+                                    ('repo_3@2', 'src/repo2/repo_renamed'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        self.assertTree(tree)
+
+        results = self.gclient(['status', '--deps', 'mac', '--jobs', '1'])
+        out = results[0].splitlines(False)
+        # TODO(maruel): http://crosbug.com/3584 It should output the unversioned
+        # files.
+        self.assertEqual(0, len(out))
+
+    def testSyncNoHistory(self):
+        # Create an extra commit in repo_2 and point DEPS to its hash.
+        cur_deps = self.FAKE_REPOS.git_hashes['repo_1'][-1][1]['DEPS']
+        repo_2_hash_old = self.FAKE_REPOS.git_hashes['repo_2'][1][0][:7]
+        self.FAKE_REPOS._commit_git('repo_2', {  # pylint: disable=protected-access
+          'last_file': 'file created in last commit',
+        })
+        repo_2_hash_new = self.FAKE_REPOS.git_hashes['repo_2'][-1][0]
+        new_deps = cur_deps.replace(repo_2_hash_old, repo_2_hash_new)
+        self.assertNotEqual(new_deps, cur_deps)
+        self.FAKE_REPOS._commit_git('repo_1', {  # pylint: disable=protected-access
+          'DEPS': new_deps,
+          'origin': 'git/repo_1@4\n',
+        })
+
+        config_template = ''.join([
+            'solutions = [{'
+            '  "name"        : "src",'
+            '  "url"         : %(git_base)r + "repo_1",'
+            '  "deps_file"   : "DEPS",'
+            '  "managed"     : True,'
+            '}]'
+        ])
+
+        self.gclient(
+            ['config', '--spec', config_template % {
+                'git_base': self.git_base
+            }])
+
+        self.gclient(['sync', '--no-history', '--deps', 'mac'])
+        repo2_root = join(self.root_dir, 'src', 'repo2')
+
+        # Check that repo_2 is actually shallow and its log has only one entry.
+        rev_lists = subprocess2.check_output(['git', 'rev-list', 'HEAD'],
+                                             cwd=repo2_root).decode('utf-8')
+        self.assertEqual(repo_2_hash_new, rev_lists.strip('\r\n'))
+
+        # Check that we have actually checked out the right commit.
+        self.assertTrue(os.path.exists(join(repo2_root, 'last_file')))
 
 
 if __name__ == '__main__':
-  if '-v' in sys.argv:
-    logging.basicConfig(level=logging.DEBUG)
-  unittest.main()
+    if '-v' in sys.argv:
+        logging.basicConfig(level=logging.DEBUG)
+    unittest.main()

+ 1473 - 1418
tests/gclient_git_smoketest.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2020 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Smoke tests for gclient.py.
 
 Shell out 'gclient' and run git tests.
@@ -22,1436 +21,1492 @@ sys.path.insert(0, ROOT_DIR)
 import subprocess2
 from testing_support.fake_repos import join, write
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class GClientSmokeGIT(gclient_smoketest_base.GClientSmokeBase):
-  def setUp(self):
-    super(GClientSmokeGIT, self).setUp()
-    self.env['PATH'] = (os.path.join(ROOT_DIR, 'testing_support')
-                        + os.pathsep + self.env['PATH'])
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    if not self.enabled:
-      self.skipTest('git fake repos not available')
-
-  def testGitmodules_relative(self):
-    self.gclient(['config', self.git_base + 'repo_19', '--name', 'dir'],
-                 cwd=self.git_base + 'repo_19')
-    self.gclient(['sync'], cwd=self.git_base + 'repo_19')
-    self.gclient(['gitmodules'],
-                 cwd=self.git_base + os.path.join('repo_19', 'dir'))
-
-    gitmodules = os.path.join(self.git_base, 'repo_19', 'dir', '.gitmodules')
-    with open(gitmodules) as f:
-      contents = f.read().splitlines()
-      self.assertEqual([
-          '[submodule "some_repo"]', '\tpath = some_repo', '\turl = /repo_2',
-          '\tgclient-condition = not foo_checkout',
-          '[submodule "chicken/dickens"]', '\tpath = chicken/dickens',
-          '\turl = /repo_3'
-      ], contents)
-
-  def testGitmodules_not_relative(self):
-    self.gclient(['config', self.git_base + 'repo_20', '--name', 'foo'],
-                 cwd=self.git_base + 'repo_20')
-    self.gclient(['sync'], cwd=self.git_base + 'repo_20')
-    self.gclient(['gitmodules'],
-                 cwd=self.git_base + os.path.join('repo_20', 'foo'))
-
-    gitmodules = os.path.join(self.git_base, 'repo_20', 'foo', '.gitmodules')
-    with open(gitmodules) as f:
-      contents = f.read().splitlines()
-      self.assertEqual([
-          '[submodule "some_repo"]', '\tpath = some_repo', '\turl = /repo_2',
-          '\tgclient-condition = not foo_checkout',
-          '[submodule "chicken/dickens"]', '\tpath = chicken/dickens',
-          '\turl = /repo_3'
-      ], contents)
-
-  def testGitmodules_not_in_gclient(self):
-    with self.assertRaisesRegex(AssertionError, 'from a gclient workspace'):
-      self.gclient(['gitmodules'], cwd=self.root_dir)
-
-  def testSync(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    # Test unversioned checkout.
-    self.parseGclient(
-        ['sync', '--deps', 'mac', '--jobs', '1'],
-        ['running', 'running', 'running'])
-    # TODO(maruel): http://crosbug.com/3582 hooks run even if not matching, must
-    # add sync parsing to get the list of updated files.
-    tree = self.mangle_git_tree(('repo_1@2', 'src'),
-                                ('repo_2@1', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    self.assertTree(tree)
-
-    # Manually remove git_hooked1 before syncing to make sure it's not
-    # recreated.
-    os.remove(join(self.root_dir, 'src', 'git_hooked1'))
-
-    # Test incremental versioned sync: sync backward.
-    self.parseGclient(
-        ['sync', '--jobs', '1', '--revision',
-        'src@' + self.githash('repo_1', 1),
-        '--deps', 'mac', '--delete_unversioned_trees'],
-        ['deleting'])
-    tree = self.mangle_git_tree(('repo_1@1', 'src'),
-                                ('repo_2@2', 'src/repo2'),
-                                ('repo_3@1', 'src/repo2/repo3'),
-                                ('repo_4@2', 'src/repo4'))
-    tree['src/git_hooked2'] = 'git_hooked2'
-    tree['src/repo2/gclient.args'] = '\n'.join([
-        '# Generated from \'DEPS\'',
-        'false_var = false',
-        'false_str_var = false',
-        'true_var = true',
-        'true_str_var = true',
-        'str_var = "abc"',
-        'cond_var = false',
-    ])
-    self.assertTree(tree)
-    # Test incremental sync: delete-unversioned_trees isn't there.
-    self.parseGclient(
-        ['sync', '--deps', 'mac', '--jobs', '1'],
-        ['running', 'running'])
-    tree = self.mangle_git_tree(('repo_1@2', 'src'),
-                                ('repo_2@1', 'src/repo2'),
-                                ('repo_3@1', 'src/repo2/repo3'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'),
-                                ('repo_4@2', 'src/repo4'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    tree['src/repo2/gclient.args'] = '\n'.join([
-        '# Generated from \'DEPS\'',
-        'false_var = false',
-        'false_str_var = false',
-        'true_var = true',
-        'true_str_var = true',
-        'str_var = "abc"',
-        'cond_var = false',
-    ])
-    self.assertTree(tree)
-
-  def testSyncJsonOutput(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    output_json = os.path.join(self.root_dir, 'output.json')
-    self.gclient(['sync', '--deps', 'mac', '--output-json', output_json])
-    with open(output_json) as f:
-      output_json = json.load(f)
-
-    out = {
-        'solutions': {
-            'src/': {
-                'scm': 'git',
+    def setUp(self):
+        super(GClientSmokeGIT, self).setUp()
+        self.env['PATH'] = (os.path.join(ROOT_DIR, 'testing_support') +
+                            os.pathsep + self.env['PATH'])
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        if not self.enabled:
+            self.skipTest('git fake repos not available')
+
+    def testGitmodules_relative(self):
+        self.gclient(['config', self.git_base + 'repo_19', '--name', 'dir'],
+                     cwd=self.git_base + 'repo_19')
+        self.gclient(['sync'], cwd=self.git_base + 'repo_19')
+        self.gclient(['gitmodules'],
+                     cwd=self.git_base + os.path.join('repo_19', 'dir'))
+
+        gitmodules = os.path.join(self.git_base, 'repo_19', 'dir',
+                                  '.gitmodules')
+        with open(gitmodules) as f:
+            contents = f.read().splitlines()
+            self.assertEqual([
+                '[submodule "some_repo"]', '\tpath = some_repo',
+                '\turl = /repo_2', '\tgclient-condition = not foo_checkout',
+                '[submodule "chicken/dickens"]', '\tpath = chicken/dickens',
+                '\turl = /repo_3'
+            ], contents)
+
+    def testGitmodules_not_relative(self):
+        self.gclient(['config', self.git_base + 'repo_20', '--name', 'foo'],
+                     cwd=self.git_base + 'repo_20')
+        self.gclient(['sync'], cwd=self.git_base + 'repo_20')
+        self.gclient(['gitmodules'],
+                     cwd=self.git_base + os.path.join('repo_20', 'foo'))
+
+        gitmodules = os.path.join(self.git_base, 'repo_20', 'foo',
+                                  '.gitmodules')
+        with open(gitmodules) as f:
+            contents = f.read().splitlines()
+            self.assertEqual([
+                '[submodule "some_repo"]', '\tpath = some_repo',
+                '\turl = /repo_2', '\tgclient-condition = not foo_checkout',
+                '[submodule "chicken/dickens"]', '\tpath = chicken/dickens',
+                '\turl = /repo_3'
+            ], contents)
+
+    def testGitmodules_not_in_gclient(self):
+        with self.assertRaisesRegex(AssertionError, 'from a gclient workspace'):
+            self.gclient(['gitmodules'], cwd=self.root_dir)
+
+    def testSync(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        # Test unversioned checkout.
+        self.parseGclient(['sync', '--deps', 'mac', '--jobs', '1'],
+                          ['running', 'running', 'running'])
+        # TODO(maruel): http://crosbug.com/3582 hooks run even if not matching,
+        # must add sync parsing to get the list of updated files.
+        tree = self.mangle_git_tree(('repo_1@2', 'src'),
+                                    ('repo_2@1', 'src/repo2'),
+                                    ('repo_3@2', 'src/repo2/repo_renamed'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        self.assertTree(tree)
+
+        # Manually remove git_hooked1 before syncing to make sure it's not
+        # recreated.
+        os.remove(join(self.root_dir, 'src', 'git_hooked1'))
+
+        # Test incremental versioned sync: sync backward.
+        self.parseGclient([
+            'sync', '--jobs', '1', '--revision',
+            'src@' + self.githash('repo_1', 1), '--deps', 'mac',
+            '--delete_unversioned_trees'
+        ], ['deleting'])
+        tree = self.mangle_git_tree(
+            ('repo_1@1', 'src'), ('repo_2@2', 'src/repo2'),
+            ('repo_3@1', 'src/repo2/repo3'), ('repo_4@2', 'src/repo4'))
+        tree['src/git_hooked2'] = 'git_hooked2'
+        tree['src/repo2/gclient.args'] = '\n'.join([
+            '# Generated from \'DEPS\'',
+            'false_var = false',
+            'false_str_var = false',
+            'true_var = true',
+            'true_str_var = true',
+            'str_var = "abc"',
+            'cond_var = false',
+        ])
+        self.assertTree(tree)
+        # Test incremental sync: delete-unversioned_trees isn't there.
+        self.parseGclient(['sync', '--deps', 'mac', '--jobs', '1'],
+                          ['running', 'running'])
+        tree = self.mangle_git_tree(
+            ('repo_1@2', 'src'), ('repo_2@1', 'src/repo2'),
+            ('repo_3@1', 'src/repo2/repo3'),
+            ('repo_3@2', 'src/repo2/repo_renamed'), ('repo_4@2', 'src/repo4'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        tree['src/repo2/gclient.args'] = '\n'.join([
+            '# Generated from \'DEPS\'',
+            'false_var = false',
+            'false_str_var = false',
+            'true_var = true',
+            'true_str_var = true',
+            'str_var = "abc"',
+            'cond_var = false',
+        ])
+        self.assertTree(tree)
+
+    def testSyncJsonOutput(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        output_json = os.path.join(self.root_dir, 'output.json')
+        self.gclient(['sync', '--deps', 'mac', '--output-json', output_json])
+        with open(output_json) as f:
+            output_json = json.load(f)
+
+        out = {
+            'solutions': {
+                'src/': {
+                    'scm': 'git',
+                    'url': self.git_base + 'repo_1',
+                    'revision': self.githash('repo_1', 2),
+                    'was_processed': True,
+                    'was_synced': True,
+                },
+                'src/repo2/': {
+                    'scm': 'git',
+                    'url':
+                    self.git_base + 'repo_2@' + self.githash('repo_2', 1)[:7],
+                    'revision': self.githash('repo_2', 1),
+                    'was_processed': True,
+                    'was_synced': True,
+                },
+                'src/repo2/repo_renamed/': {
+                    'scm': 'git',
+                    'url': self.git_base + 'repo_3',
+                    'revision': self.githash('repo_3', 2),
+                    'was_processed': True,
+                    'was_synced': True,
+                },
+                'src/should_not_process/': {
+                    'scm': None,
+                    'url': self.git_base + 'repo_4',
+                    'revision': None,
+                    'was_processed': False,
+                    'was_synced': True,
+                },
+            },
+        }
+        self.assertEqual(out, output_json)
+
+    def testSyncIgnoredSolutionName(self):
+        """TODO(maruel): This will become an error soon."""
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.parseGclient(
+            [
+                'sync', '--deps', 'mac', '--jobs', '1', '--revision',
+                'invalid@' + self.githash('repo_1', 1)
+            ], ['running', 'running', 'running'],
+            'Please fix your script, having invalid --revision flags '
+            'will soon be considered an error.\n')
+        tree = self.mangle_git_tree(('repo_1@2', 'src'),
+                                    ('repo_2@1', 'src/repo2'),
+                                    ('repo_3@2', 'src/repo2/repo_renamed'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        self.assertTree(tree)
+
+    def testSyncNoSolutionName(self):
+        # When no solution name is provided, gclient uses the first solution
+        # listed.
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.parseGclient([
+            'sync', '--deps', 'mac', '--jobs', '1', '--revision',
+            self.githash('repo_1', 1)
+        ], ['running'])
+        tree = self.mangle_git_tree(
+            ('repo_1@1', 'src'), ('repo_2@2', 'src/repo2'),
+            ('repo_3@1', 'src/repo2/repo3'), ('repo_4@2', 'src/repo4'))
+        tree['src/repo2/gclient.args'] = '\n'.join([
+            '# Generated from \'DEPS\'',
+            'false_var = false',
+            'false_str_var = false',
+            'true_var = true',
+            'true_str_var = true',
+            'str_var = "abc"',
+            'cond_var = false',
+        ])
+        self.assertTree(tree)
+
+    def testSyncJobs(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        # Test unversioned checkout.
+        self.parseGclient(['sync', '--deps', 'mac', '--jobs', '8'],
+                          ['running', 'running', 'running'],
+                          untangle=True)
+        # TODO(maruel): http://crosbug.com/3582 hooks run even if not matching,
+        # must add sync parsing to get the list of updated files.
+        tree = self.mangle_git_tree(('repo_1@2', 'src'),
+                                    ('repo_2@1', 'src/repo2'),
+                                    ('repo_3@2', 'src/repo2/repo_renamed'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        self.assertTree(tree)
+
+        # Manually remove git_hooked1 before syncing to make sure it's not
+        # recreated.
+        os.remove(join(self.root_dir, 'src', 'git_hooked1'))
+
+        # Test incremental versioned sync: sync backward.
+        # Use --jobs 1 otherwise the order is not deterministic.
+        self.parseGclient([
+            'sync', '--revision', 'src@' + self.githash('repo_1', 1), '--deps',
+            'mac', '--delete_unversioned_trees', '--jobs', '1'
+        ], ['deleting'],
+                          untangle=True)
+        tree = self.mangle_git_tree(
+            ('repo_1@1', 'src'), ('repo_2@2', 'src/repo2'),
+            ('repo_3@1', 'src/repo2/repo3'), ('repo_4@2', 'src/repo4'))
+        tree['src/git_hooked2'] = 'git_hooked2'
+        tree['src/repo2/gclient.args'] = '\n'.join([
+            '# Generated from \'DEPS\'',
+            'false_var = false',
+            'false_str_var = false',
+            'true_var = true',
+            'true_str_var = true',
+            'str_var = "abc"',
+            'cond_var = false',
+        ])
+        self.assertTree(tree)
+        # Test incremental sync: delete-unversioned_trees isn't there.
+        self.parseGclient(['sync', '--deps', 'mac', '--jobs', '8'],
+                          ['running', 'running'],
+                          untangle=True)
+        tree = self.mangle_git_tree(
+            ('repo_1@2', 'src'), ('repo_2@1', 'src/repo2'),
+            ('repo_3@1', 'src/repo2/repo3'),
+            ('repo_3@2', 'src/repo2/repo_renamed'), ('repo_4@2', 'src/repo4'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        tree['src/repo2/gclient.args'] = '\n'.join([
+            '# Generated from \'DEPS\'',
+            'false_var = false',
+            'false_str_var = false',
+            'true_var = true',
+            'true_str_var = true',
+            'str_var = "abc"',
+            'cond_var = false',
+        ])
+        self.assertTree(tree)
+
+    def testSyncFetch(self):
+        self.gclient(['config', self.git_base + 'repo_13', '--name', 'src'])
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_13', 2)
+        ])
+
+    def testSyncFetchUpdate(self):
+        self.gclient(['config', self.git_base + 'repo_13', '--name', 'src'])
+
+        # Sync to an earlier revision first, one that doesn't refer to
+        # non-standard refs.
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_13', 1)
+        ])
+
+        # Make sure update that pulls a non-standard ref works.
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            self.githash('repo_13', 2)
+        ])
+
+    def testSyncDirect(self):
+        self.gclient(['config', self.git_base + 'repo_12', '--name', 'src'])
+        self.gclient(
+            ['sync', '-v', '-v', '-v', '--revision', 'refs/changes/1212'])
+
+    def testSyncUnmanaged(self):
+        self.gclient([
+            'config', '--spec',
+            'solutions=[{"name":"src", "url": %r, "managed": False}]' %
+            (self.git_base + 'repo_5')
+        ])
+        self.gclient(['sync', '--revision', 'src@' + self.githash('repo_5', 2)])
+        self.gclient(
+            ['sync', '--revision',
+             'src/repo1@%s' % self.githash('repo_1', 1)])
+        # src is unmanaged, so gclient shouldn't have updated it. It should've
+        # stayed synced at @2
+        tree = self.mangle_git_tree(('repo_5@2', 'src'),
+                                    ('repo_1@1', 'src/repo1'),
+                                    ('repo_2@1', 'src/repo2'))
+        tree['src/git_pre_deps_hooked'] = 'git_pre_deps_hooked'
+        self.assertTree(tree)
+
+    def testSyncUrl(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient([
+            'sync', '-v', '-v', '-v', '--revision',
+            'src/repo2@%s' % self.githash('repo_2', 1), '--revision',
+            '%srepo_2@%s' % (self.git_base, self.githash('repo_2', 2))
+        ])
+        # repo_2 should've been synced to @2 instead of @1, since URLs override
+        # paths.
+        tree = self.mangle_git_tree(('repo_1@2', 'src'),
+                                    ('repo_2@2', 'src/repo2'),
+                                    ('repo_3@2', 'src/repo2/repo_renamed'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        self.assertTree(tree)
+
+    def testSyncPatchRef(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient([
+            'sync',
+            '-v',
+            '-v',
+            '-v',
+            '--revision',
+            'src/repo2@%s' % self.githash('repo_2', 1),
+            '--patch-ref',
+            '%srepo_2@refs/heads/main:%s' %
+            (self.git_base, self.githash('repo_2', 2)),
+        ])
+        # Assert that repo_2 files coincide with revision @2 (the patch ref)
+        tree = self.mangle_git_tree(('repo_1@2', 'src'),
+                                    ('repo_2@2', 'src/repo2'),
+                                    ('repo_3@2', 'src/repo2/repo_renamed'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        self.assertTree(tree)
+        # Assert that HEAD revision of repo_2 is @1 (the base we synced to)
+        # since we should have done a soft reset.
+        self.assertEqual(
+            self.githash('repo_2', 1),
+            self.gitrevparse(os.path.join(self.root_dir, 'src/repo2')))
+
+    def testSyncPatchRefNoHooks(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient([
+            'sync',
+            '-v',
+            '-v',
+            '-v',
+            '--revision',
+            'src/repo2@%s' % self.githash('repo_2', 1),
+            '--patch-ref',
+            '%srepo_2@refs/heads/main:%s' %
+            (self.git_base, self.githash('repo_2', 2)),
+            '--nohooks',
+        ])
+        # Assert that repo_2 files coincide with revision @2 (the patch ref)
+        tree = self.mangle_git_tree(('repo_1@2', 'src'),
+                                    ('repo_2@2', 'src/repo2'),
+                                    ('repo_3@2', 'src/repo2/repo_renamed'))
+        self.assertTree(tree)
+        # Assert that HEAD revision of repo_2 is @1 (the base we synced to)
+        # since we should have done a soft reset.
+        self.assertEqual(
+            self.githash('repo_2', 1),
+            self.gitrevparse(os.path.join(self.root_dir, 'src/repo2')))
+
+    def testRunHooks(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient(['sync', '--deps', 'mac'])
+        tree = self.mangle_git_tree(('repo_1@2', 'src'),
+                                    ('repo_2@1', 'src/repo2'),
+                                    ('repo_3@2', 'src/repo2/repo_renamed'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        self.assertTree(tree)
+
+        os.remove(join(self.root_dir, 'src', 'git_hooked1'))
+        os.remove(join(self.root_dir, 'src', 'git_hooked2'))
+        # runhooks runs all hooks even if not matching by design.
+        out = self.parseGclient(['runhooks', '--deps', 'mac'],
+                                ['running', 'running'])
+        self.assertEqual(1, len(out[0]))
+        self.assertEqual(1, len(out[1]))
+        tree = self.mangle_git_tree(('repo_1@2', 'src'),
+                                    ('repo_2@1', 'src/repo2'),
+                                    ('repo_3@2', 'src/repo2/repo_renamed'))
+        tree['src/git_hooked1'] = 'git_hooked1'
+        tree['src/git_hooked2'] = 'git_hooked2'
+        self.assertTree(tree)
+
+    def testRunHooksCondition(self):
+        self.gclient(['config', self.git_base + 'repo_7', '--name', 'src'])
+        self.gclient(['sync', '--deps', 'mac'])
+        tree = self.mangle_git_tree(('repo_7@1', 'src'))
+        tree['src/should_run'] = 'should_run'
+        self.assertTree(tree)
+
+    def testPreDepsHooks(self):
+        self.gclient(['config', self.git_base + 'repo_5', '--name', 'src'])
+        expectation = [
+            ('running', self.root_dir),  # git clone
+            ('running', self.root_dir),  # pre-deps hook
+        ]
+        out = self.parseGclient([
+            'sync', '--deps', 'mac', '--jobs=1', '--revision',
+            'src@' + self.githash('repo_5', 2)
+        ], expectation)
+        self.assertEqual('Cloning into ', out[0][1][:13])
+        # parseGClient may produce hook slowness warning, so we expect either 2
+        # or 3 blocks.
+        self.assertIn(len(out[1]), [2, 3], out[1])
+        self.assertEqual('pre-deps hook', out[1][1])
+        tree = self.mangle_git_tree(('repo_5@2', 'src'),
+                                    ('repo_1@2', 'src/repo1'),
+                                    ('repo_2@1', 'src/repo2'))
+        tree['src/git_pre_deps_hooked'] = 'git_pre_deps_hooked'
+        self.assertTree(tree)
+
+        os.remove(join(self.root_dir, 'src', 'git_pre_deps_hooked'))
+
+        # Pre-DEPS hooks don't run with runhooks.
+        self.gclient(['runhooks', '--deps', 'mac'])
+        tree = self.mangle_git_tree(('repo_5@2', 'src'),
+                                    ('repo_1@2', 'src/repo1'),
+                                    ('repo_2@1', 'src/repo2'))
+        self.assertTree(tree)
+
+        # Pre-DEPS hooks run when syncing with --nohooks.
+        self.gclient([
+            'sync', '--deps', 'mac', '--nohooks', '--revision',
+            'src@' + self.githash('repo_5', 2)
+        ])
+        tree = self.mangle_git_tree(('repo_5@2', 'src'),
+                                    ('repo_1@2', 'src/repo1'),
+                                    ('repo_2@1', 'src/repo2'))
+        tree['src/git_pre_deps_hooked'] = 'git_pre_deps_hooked'
+        self.assertTree(tree)
+
+        os.remove(join(self.root_dir, 'src', 'git_pre_deps_hooked'))
+
+        # Pre-DEPS hooks don't run with --noprehooks
+        self.gclient([
+            'sync', '--deps', 'mac', '--noprehooks', '--revision',
+            'src@' + self.githash('repo_5', 2)
+        ])
+        tree = self.mangle_git_tree(('repo_5@2', 'src'),
+                                    ('repo_1@2', 'src/repo1'),
+                                    ('repo_2@1', 'src/repo2'))
+        self.assertTree(tree)
+
+    def testPreDepsHooksError(self):
+        self.gclient(['config', self.git_base + 'repo_5', '--name', 'src'])
+        expectated_stdout = [
+            ('running', self.root_dir),  # git clone
+            ('running', self.root_dir),  # pre-deps hook
+            ('running', self.root_dir),  # pre-deps hook (fails)
+        ]
+        expected_stderr = (
+            "Error: Command 'python3 -c import sys; "
+            "sys.exit(1)' returned non-zero exit status 1 in %s\n" %
+            (self.root_dir))
+        stdout, stderr, retcode = self.gclient([
+            'sync', '--deps', 'mac', '--jobs=1', '--revision',
+            'src@' + self.githash('repo_5', 3)
+        ],
+                                               error_ok=True)
+        self.assertEqual(stderr, expected_stderr)
+        self.assertEqual(2, retcode)
+        self.checkBlock(stdout, expectated_stdout)
+
+    def testRevInfo(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient(['sync', '--deps', 'mac'])
+        results = self.gclient(['revinfo', '--deps', 'mac'])
+        out = ('src: %(base)srepo_1\n'
+               'src/repo2: %(base)srepo_2@%(hash2)s\n'
+               'src/repo2/repo_renamed: %(base)srepo_3\n' % {
+                   'base': self.git_base,
+                   'hash2': self.githash('repo_2', 1)[:7],
+               })
+        self.check((out, '', 0), results)
+
+    def testRevInfoActual(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient(['sync', '--deps', 'mac'])
+        results = self.gclient(['revinfo', '--deps', 'mac', '--actual'])
+        out = ('src: %(base)srepo_1@%(hash1)s\n'
+               'src/repo2: %(base)srepo_2@%(hash2)s\n'
+               'src/repo2/repo_renamed: %(base)srepo_3@%(hash3)s\n' % {
+                   'base': self.git_base,
+                   'hash1': self.githash('repo_1', 2),
+                   'hash2': self.githash('repo_2', 1),
+                   'hash3': self.githash('repo_3', 2),
+               })
+        self.check((out, '', 0), results)
+
+    def testRevInfoFilterPath(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient(['sync', '--deps', 'mac'])
+        results = self.gclient(['revinfo', '--deps', 'mac', '--filter', 'src'])
+        out = ('src: %(base)srepo_1\n' % {
+            'base': self.git_base,
+        })
+        self.check((out, '', 0), results)
+
+    def testRevInfoFilterURL(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient(['sync', '--deps', 'mac'])
+        results = self.gclient([
+            'revinfo', '--deps', 'mac', '--filter',
+            '%srepo_2' % self.git_base
+        ])
+        out = ('src/repo2: %(base)srepo_2@%(hash2)s\n' % {
+            'base': self.git_base,
+            'hash2': self.githash('repo_2', 1)[:7],
+        })
+        self.check((out, '', 0), results)
+
+    def testRevInfoFilterURLOrPath(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient(['sync', '--deps', 'mac'])
+        results = self.gclient([
+            'revinfo', '--deps', 'mac', '--filter', 'src', '--filter',
+            '%srepo_2' % self.git_base
+        ])
+        out = ('src: %(base)srepo_1\n'
+               'src/repo2: %(base)srepo_2@%(hash2)s\n' % {
+                   'base': self.git_base,
+                   'hash2': self.githash('repo_2', 1)[:7],
+               })
+        self.check((out, '', 0), results)
+
+    def testRevInfoJsonOutput(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient(['sync', '--deps', 'mac'])
+        output_json = os.path.join(self.root_dir, 'output.json')
+        self.gclient(['revinfo', '--deps', 'mac', '--output-json', output_json])
+        with open(output_json) as f:
+            output_json = json.load(f)
+
+        out = {
+            'src': {
                 'url': self.git_base + 'repo_1',
-                'revision': self.githash('repo_1', 2),
-                'was_processed': True,
-                'was_synced': True,
+                'rev': None,
             },
-            'src/repo2/': {
-                'scm': 'git',
-                'url':
-                self.git_base + 'repo_2@' + self.githash('repo_2', 1)[:7],
-                'revision': self.githash('repo_2', 1),
-                'was_processed': True,
-                'was_synced': True,
+            'src/repo2': {
+                'url': self.git_base + 'repo_2',
+                'rev': self.githash('repo_2', 1)[:7],
             },
-            'src/repo2/repo_renamed/': {
-                'scm': 'git',
+            'src/repo2/repo_renamed': {
                 'url': self.git_base + 'repo_3',
-                'revision': self.githash('repo_3', 2),
-                'was_processed': True,
-                'was_synced': True,
+                'rev': None,
             },
-            'src/should_not_process/': {
-                'scm': None,
-                'url': self.git_base + 'repo_4',
-                'revision': None,
-                'was_processed': False,
-                'was_synced': True,
+        }
+        self.assertEqual(out, output_json)
+
+    def testRevInfoJsonOutputSnapshot(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        self.gclient(['sync', '--deps', 'mac'])
+        output_json = os.path.join(self.root_dir, 'output.json')
+        self.gclient([
+            'revinfo', '--deps', 'mac', '--snapshot', '--output-json',
+            output_json
+        ])
+        with open(output_json) as f:
+            output_json = json.load(f)
+
+        out = [{
+            'solution_url': self.git_base + 'repo_1',
+            'managed': True,
+            'name': 'src',
+            'deps_file': 'DEPS',
+            'custom_deps': {
+                'src/repo2':
+                '%srepo_2@%s' % (self.git_base, self.githash('repo_2', 1)),
+                'src/repo2/repo_renamed':
+                '%srepo_3@%s' % (self.git_base, self.githash('repo_3', 2)),
+                'src/should_not_process':
+                None,
             },
-        },
-    }
-    self.assertEqual(out, output_json)
-
-  def testSyncIgnoredSolutionName(self):
-    """TODO(maruel): This will become an error soon."""
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.parseGclient(
-        ['sync', '--deps', 'mac', '--jobs', '1',
-         '--revision', 'invalid@' + self.githash('repo_1', 1)],
-        ['running', 'running', 'running'],
-        'Please fix your script, having invalid --revision flags '
-        'will soon be considered an error.\n')
-    tree = self.mangle_git_tree(('repo_1@2', 'src'),
-                                ('repo_2@1', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    self.assertTree(tree)
-
-  def testSyncNoSolutionName(self):
-    # When no solution name is provided, gclient uses the first solution listed.
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.parseGclient(
-        ['sync', '--deps', 'mac', '--jobs', '1',
-         '--revision', self.githash('repo_1', 1)],
-        ['running'])
-    tree = self.mangle_git_tree(('repo_1@1', 'src'),
-                                ('repo_2@2', 'src/repo2'),
-                                ('repo_3@1', 'src/repo2/repo3'),
-                                ('repo_4@2', 'src/repo4'))
-    tree['src/repo2/gclient.args'] = '\n'.join([
-        '# Generated from \'DEPS\'',
-        'false_var = false',
-        'false_str_var = false',
-        'true_var = true',
-        'true_str_var = true',
-        'str_var = "abc"',
-        'cond_var = false',
-    ])
-    self.assertTree(tree)
-
-  def testSyncJobs(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    # Test unversioned checkout.
-    self.parseGclient(
-        ['sync', '--deps', 'mac', '--jobs', '8'],
-        ['running', 'running', 'running'],
-        untangle=True)
-    # TODO(maruel): http://crosbug.com/3582 hooks run even if not matching, must
-    # add sync parsing to get the list of updated files.
-    tree = self.mangle_git_tree(('repo_1@2', 'src'),
-                                ('repo_2@1', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    self.assertTree(tree)
-
-    # Manually remove git_hooked1 before syncing to make sure it's not
-    # recreated.
-    os.remove(join(self.root_dir, 'src', 'git_hooked1'))
-
-    # Test incremental versioned sync: sync backward.
-    # Use --jobs 1 otherwise the order is not deterministic.
-    self.parseGclient(
-        ['sync', '--revision', 'src@' + self.githash('repo_1', 1),
-          '--deps', 'mac', '--delete_unversioned_trees', '--jobs', '1'],
-        ['deleting'],
-        untangle=True)
-    tree = self.mangle_git_tree(('repo_1@1', 'src'),
-                                ('repo_2@2', 'src/repo2'),
-                                ('repo_3@1', 'src/repo2/repo3'),
-                                ('repo_4@2', 'src/repo4'))
-    tree['src/git_hooked2'] = 'git_hooked2'
-    tree['src/repo2/gclient.args'] = '\n'.join([
-        '# Generated from \'DEPS\'',
-        'false_var = false',
-        'false_str_var = false',
-        'true_var = true',
-        'true_str_var = true',
-        'str_var = "abc"',
-        'cond_var = false',
-    ])
-    self.assertTree(tree)
-    # Test incremental sync: delete-unversioned_trees isn't there.
-    self.parseGclient(
-        ['sync', '--deps', 'mac', '--jobs', '8'],
-        ['running', 'running'],
-        untangle=True)
-    tree = self.mangle_git_tree(('repo_1@2', 'src'),
-                                ('repo_2@1', 'src/repo2'),
-                                ('repo_3@1', 'src/repo2/repo3'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'),
-                                ('repo_4@2', 'src/repo4'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    tree['src/repo2/gclient.args'] = '\n'.join([
-        '# Generated from \'DEPS\'',
-        'false_var = false',
-        'false_str_var = false',
-        'true_var = true',
-        'true_str_var = true',
-        'str_var = "abc"',
-        'cond_var = false',
-    ])
-    self.assertTree(tree)
-
-  def testSyncFetch(self):
-    self.gclient(['config', self.git_base + 'repo_13', '--name', 'src'])
-    self.gclient(
-        ['sync', '-v', '-v', '-v', '--revision', self.githash('repo_13', 2)])
-
-  def testSyncFetchUpdate(self):
-    self.gclient(['config', self.git_base + 'repo_13', '--name', 'src'])
-
-    # Sync to an earlier revision first, one that doesn't refer to
-    # non-standard refs.
-    self.gclient(
-        ['sync', '-v', '-v', '-v', '--revision', self.githash('repo_13', 1)])
-
-    # Make sure update that pulls a non-standard ref works.
-    self.gclient(
-        ['sync', '-v', '-v', '-v', '--revision', self.githash('repo_13', 2)])
-
-  def testSyncDirect(self):
-    self.gclient(['config', self.git_base + 'repo_12', '--name', 'src'])
-    self.gclient(
-        ['sync', '-v', '-v', '-v', '--revision', 'refs/changes/1212'])
-
-  def testSyncUnmanaged(self):
-    self.gclient([
-        'config', '--spec',
-        'solutions=[{"name":"src", "url": %r, "managed": False}]' % (
-            self.git_base + 'repo_5')])
-    self.gclient([
-        'sync', '--revision', 'src@' + self.githash('repo_5', 2)])
-    self.gclient([
-        'sync', '--revision', 'src/repo1@%s' % self.githash('repo_1', 1)])
-    # src is unmanaged, so gclient shouldn't have updated it. It should've
-    # stayed synced at @2
-    tree = self.mangle_git_tree(('repo_5@2', 'src'),
-                                ('repo_1@1', 'src/repo1'),
-                                ('repo_2@1', 'src/repo2'))
-    tree['src/git_pre_deps_hooked'] = 'git_pre_deps_hooked'
-    self.assertTree(tree)
-
-  def testSyncUrl(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient([
-        'sync', '-v', '-v', '-v',
-        '--revision', 'src/repo2@%s' % self.githash('repo_2', 1),
-        '--revision', '%srepo_2@%s' % (self.git_base, self.githash('repo_2', 2))
-    ])
-    # repo_2 should've been synced to @2 instead of @1, since URLs override
-    # paths.
-    tree = self.mangle_git_tree(('repo_1@2', 'src'),
-                                ('repo_2@2', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    self.assertTree(tree)
-
-  def testSyncPatchRef(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient([
-        'sync', '-v', '-v', '-v',
-        '--revision', 'src/repo2@%s' % self.githash('repo_2', 1),
-        '--patch-ref',
-        '%srepo_2@refs/heads/main:%s' % (
-            self.git_base, self.githash('repo_2', 2)),
-    ])
-    # Assert that repo_2 files coincide with revision @2 (the patch ref)
-    tree = self.mangle_git_tree(('repo_1@2', 'src'),
-                                ('repo_2@2', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    self.assertTree(tree)
-    # Assert that HEAD revision of repo_2 is @1 (the base we synced to) since we
-    # should have done a soft reset.
-    self.assertEqual(
-        self.githash('repo_2', 1),
-        self.gitrevparse(os.path.join(self.root_dir, 'src/repo2')))
-
-  def testSyncPatchRefNoHooks(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient([
-        'sync', '-v', '-v', '-v',
-        '--revision', 'src/repo2@%s' % self.githash('repo_2', 1),
-        '--patch-ref',
-        '%srepo_2@refs/heads/main:%s' % (
-            self.git_base, self.githash('repo_2', 2)),
-        '--nohooks',
-    ])
-    # Assert that repo_2 files coincide with revision @2 (the patch ref)
-    tree = self.mangle_git_tree(('repo_1@2', 'src'),
-                                ('repo_2@2', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    self.assertTree(tree)
-    # Assert that HEAD revision of repo_2 is @1 (the base we synced to) since we
-    # should have done a soft reset.
-    self.assertEqual(
-        self.githash('repo_2', 1),
-        self.gitrevparse(os.path.join(self.root_dir, 'src/repo2')))
-
-  def testRunHooks(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient(['sync', '--deps', 'mac'])
-    tree = self.mangle_git_tree(('repo_1@2', 'src'),
-                                ('repo_2@1', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    self.assertTree(tree)
-
-    os.remove(join(self.root_dir, 'src', 'git_hooked1'))
-    os.remove(join(self.root_dir, 'src', 'git_hooked2'))
-    # runhooks runs all hooks even if not matching by design.
-    out = self.parseGclient(['runhooks', '--deps', 'mac'],
-                            ['running', 'running'])
-    self.assertEqual(1, len(out[0]))
-    self.assertEqual(1, len(out[1]))
-    tree = self.mangle_git_tree(('repo_1@2', 'src'),
-                                ('repo_2@1', 'src/repo2'),
-                                ('repo_3@2', 'src/repo2/repo_renamed'))
-    tree['src/git_hooked1'] = 'git_hooked1'
-    tree['src/git_hooked2'] = 'git_hooked2'
-    self.assertTree(tree)
-
-  def testRunHooksCondition(self):
-    self.gclient(['config', self.git_base + 'repo_7', '--name', 'src'])
-    self.gclient(['sync', '--deps', 'mac'])
-    tree = self.mangle_git_tree(('repo_7@1', 'src'))
-    tree['src/should_run'] = 'should_run'
-    self.assertTree(tree)
-
-  def testPreDepsHooks(self):
-    self.gclient(['config', self.git_base + 'repo_5', '--name', 'src'])
-    expectation = [
-        ('running', self.root_dir),                 # git clone
-        ('running', self.root_dir),                 # pre-deps hook
-    ]
-    out = self.parseGclient(['sync', '--deps', 'mac', '--jobs=1',
-                             '--revision', 'src@' + self.githash('repo_5', 2)],
-                            expectation)
-    self.assertEqual('Cloning into ', out[0][1][:13])
-    # parseGClient may produce hook slowness warning, so we expect either 2 or 3
-    # blocks.
-    self.assertIn(len(out[1]), [2, 3], out[1])
-    self.assertEqual('pre-deps hook', out[1][1])
-    tree = self.mangle_git_tree(('repo_5@2', 'src'),
-                                ('repo_1@2', 'src/repo1'),
-                                ('repo_2@1', 'src/repo2')
-                                )
-    tree['src/git_pre_deps_hooked'] = 'git_pre_deps_hooked'
-    self.assertTree(tree)
-
-    os.remove(join(self.root_dir, 'src', 'git_pre_deps_hooked'))
-
-    # Pre-DEPS hooks don't run with runhooks.
-    self.gclient(['runhooks', '--deps', 'mac'])
-    tree = self.mangle_git_tree(('repo_5@2', 'src'),
-                                ('repo_1@2', 'src/repo1'),
-                                ('repo_2@1', 'src/repo2')
-                                )
-    self.assertTree(tree)
-
-    # Pre-DEPS hooks run when syncing with --nohooks.
-    self.gclient(['sync', '--deps', 'mac', '--nohooks',
-                  '--revision', 'src@' + self.githash('repo_5', 2)])
-    tree = self.mangle_git_tree(('repo_5@2', 'src'),
-                                ('repo_1@2', 'src/repo1'),
-                                ('repo_2@1', 'src/repo2')
-                                )
-    tree['src/git_pre_deps_hooked'] = 'git_pre_deps_hooked'
-    self.assertTree(tree)
-
-    os.remove(join(self.root_dir, 'src', 'git_pre_deps_hooked'))
-
-    # Pre-DEPS hooks don't run with --noprehooks
-    self.gclient(['sync', '--deps', 'mac', '--noprehooks',
-                  '--revision', 'src@' + self.githash('repo_5', 2)])
-    tree = self.mangle_git_tree(('repo_5@2', 'src'),
-                                ('repo_1@2', 'src/repo1'),
-                                ('repo_2@1', 'src/repo2')
-                                )
-    self.assertTree(tree)
-
-  def testPreDepsHooksError(self):
-    self.gclient(['config', self.git_base + 'repo_5', '--name', 'src'])
-    expectated_stdout = [
-        ('running', self.root_dir),                 # git clone
-        ('running', self.root_dir),                 # pre-deps hook
-        ('running', self.root_dir),                 # pre-deps hook (fails)
-    ]
-    expected_stderr = ("Error: Command 'python3 -c import sys; "
-                       "sys.exit(1)' returned non-zero exit status 1 in %s\n" %
-                       (self.root_dir))
-    stdout, stderr, retcode = self.gclient(
-        ['sync', '--deps', 'mac', '--jobs=1', '--revision',
-         'src@' + self.githash('repo_5', 3)], error_ok=True)
-    self.assertEqual(stderr, expected_stderr)
-    self.assertEqual(2, retcode)
-    self.checkBlock(stdout, expectated_stdout)
-
-  def testRevInfo(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient(['sync', '--deps', 'mac'])
-    results = self.gclient(['revinfo', '--deps', 'mac'])
-    out = ('src: %(base)srepo_1\n'
-           'src/repo2: %(base)srepo_2@%(hash2)s\n'
-           'src/repo2/repo_renamed: %(base)srepo_3\n' %
-          {
-            'base': self.git_base,
-            'hash2': self.githash('repo_2', 1)[:7],
-          })
-    self.check((out, '', 0), results)
-
-  def testRevInfoActual(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient(['sync', '--deps', 'mac'])
-    results = self.gclient(['revinfo', '--deps', 'mac', '--actual'])
-    out = ('src: %(base)srepo_1@%(hash1)s\n'
-           'src/repo2: %(base)srepo_2@%(hash2)s\n'
-           'src/repo2/repo_renamed: %(base)srepo_3@%(hash3)s\n' %
-          {
-            'base': self.git_base,
-            'hash1': self.githash('repo_1', 2),
-            'hash2': self.githash('repo_2', 1),
-            'hash3': self.githash('repo_3', 2),
-          })
-    self.check((out, '', 0), results)
-
-  def testRevInfoFilterPath(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient(['sync', '--deps', 'mac'])
-    results = self.gclient(['revinfo', '--deps', 'mac', '--filter', 'src'])
-    out = ('src: %(base)srepo_1\n' %
-          {
-            'base': self.git_base,
-          })
-    self.check((out, '', 0), results)
-
-  def testRevInfoFilterURL(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient(['sync', '--deps', 'mac'])
-    results = self.gclient(['revinfo', '--deps', 'mac',
-                            '--filter', '%srepo_2' % self.git_base])
-    out = ('src/repo2: %(base)srepo_2@%(hash2)s\n' %
-          {
-            'base': self.git_base,
-            'hash2': self.githash('repo_2', 1)[:7],
-          })
-    self.check((out, '', 0), results)
-
-  def testRevInfoFilterURLOrPath(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient(['sync', '--deps', 'mac'])
-    results = self.gclient(['revinfo', '--deps', 'mac', '--filter', 'src',
-                            '--filter', '%srepo_2' % self.git_base])
-    out = ('src: %(base)srepo_1\n'
-           'src/repo2: %(base)srepo_2@%(hash2)s\n' %
-          {
-            'base': self.git_base,
-            'hash2': self.githash('repo_2', 1)[:7],
-          })
-    self.check((out, '', 0), results)
-
-  def testRevInfoJsonOutput(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient(['sync', '--deps', 'mac'])
-    output_json = os.path.join(self.root_dir, 'output.json')
-    self.gclient(['revinfo', '--deps', 'mac', '--output-json', output_json])
-    with open(output_json) as f:
-      output_json = json.load(f)
-
-    out = {
-        'src': {
-            'url': self.git_base + 'repo_1',
-            'rev': None,
-        },
-        'src/repo2': {
-            'url': self.git_base + 'repo_2',
-            'rev': self.githash('repo_2', 1)[:7],
-        },
-       'src/repo2/repo_renamed': {
-           'url': self.git_base + 'repo_3',
-           'rev': None,
-        },
-    }
-    self.assertEqual(out, output_json)
-
-  def testRevInfoJsonOutputSnapshot(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    self.gclient(['sync', '--deps', 'mac'])
-    output_json = os.path.join(self.root_dir, 'output.json')
-    self.gclient(['revinfo', '--deps', 'mac', '--snapshot',
-                  '--output-json', output_json])
-    with open(output_json) as f:
-      output_json = json.load(f)
-
-    out = [{
-        'solution_url': self.git_base + 'repo_1',
-        'managed': True,
-        'name': 'src',
-        'deps_file': 'DEPS',
-        'custom_deps': {
-            'src/repo2': '%srepo_2@%s' % (
-                self.git_base, self.githash('repo_2', 1)),
-            'src/repo2/repo_renamed': '%srepo_3@%s' % (
-                self.git_base, self.githash('repo_3', 2)),
-            'src/should_not_process': None,
-        },
-    }]
-    self.assertEqual(out, output_json)
-
-  def testSetDep(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    fake_deps = os.path.join(self.git_base, 'repo_1', 'DEPS')
-    with open(fake_deps, 'w') as f:
-      f.write('\n'.join([
-          'vars = { ',
-          '  "foo_var": "foo_val",',
-          '  "foo_rev": "foo_rev",',
-          '}',
-          'deps = {',
-          '  "foo": {',
-          '    "url": "url@{foo_rev}",',
-          '  },',
-          '  "bar": "url@bar_rev",',
-          '}',
-      ]))
-
-    self.gclient([
-        'setdep', '-r', 'foo@new_foo', '-r', 'bar@new_bar', '--var',
-        'foo_var=new_val', '--deps-file', fake_deps
-    ],
-                 cwd=self.git_base + 'repo_1')
-
-    with open(fake_deps) as f:
-      contents = f.read().splitlines()
-
-    self.assertEqual([
-          'vars = { ',
-          '  "foo_var": "new_val",',
-          '  "foo_rev": "new_foo",',
-          '}',
-          'deps = {',
-          '  "foo": {',
-          '    "url": "url@{foo_rev}",',
-          '  },',
-          '  "bar": "url@new_bar",',
-          '}',
-    ], contents)
-
-  def testSetDep_Submodules(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    with open(os.path.join(self.git_base, '.gclient'), 'w') as f:
-      f.write('')
-
-    fake_deps = os.path.join(self.git_base, 'repo_1', 'DEPS')
-    gitmodules = os.path.join(self.git_base, 'repo_1', '.gitmodules')
-    with open(fake_deps, 'w') as f:
-      f.write('\n'.join([
-          'git_dependencies = "SYNC"',
-          'vars = { ',
-          '  "foo_var": "foo_val",',
-          '  "foo_rev": "foo_rev",',
-          '}',
-          'deps = { ',
-          '  "repo_1/foo": "https://foo" + Var("foo_rev"),',
-          '  "repo_1/bar": "https://bar@barrev",',
-          '}',
-      ]))
-
-    with open(gitmodules, 'w') as f:
-      f.write('\n'.join([
-          '[submodule "foo"]', '  url = https://foo', '  path = foo',
-          '[submodule "bar"]', '  url = https://bar', '  path = bar'
-      ]))
-
-    subprocess2.call([
-        'git', 'update-index', '--add', '--cacheinfo',
-        '160000,aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,foo'
-    ],
+        }]
+        self.assertEqual(out, output_json)
+
+    def testSetDep(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        fake_deps = os.path.join(self.git_base, 'repo_1', 'DEPS')
+        with open(fake_deps, 'w') as f:
+            f.write('\n'.join([
+                'vars = { ',
+                '  "foo_var": "foo_val",',
+                '  "foo_rev": "foo_rev",',
+                '}',
+                'deps = {',
+                '  "foo": {',
+                '    "url": "url@{foo_rev}",',
+                '  },',
+                '  "bar": "url@bar_rev",',
+                '}',
+            ]))
+
+        self.gclient([
+            'setdep', '-r', 'foo@new_foo', '-r', 'bar@new_bar', '--var',
+            'foo_var=new_val', '--deps-file', fake_deps
+        ],
                      cwd=self.git_base + 'repo_1')
-    subprocess2.call([
-        'git', 'update-index', '--add', '--cacheinfo',
-        '160000,aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,bar'
-    ],
+
+        with open(fake_deps) as f:
+            contents = f.read().splitlines()
+
+        self.assertEqual([
+            'vars = { ',
+            '  "foo_var": "new_val",',
+            '  "foo_rev": "new_foo",',
+            '}',
+            'deps = {',
+            '  "foo": {',
+            '    "url": "url@{foo_rev}",',
+            '  },',
+            '  "bar": "url@new_bar",',
+            '}',
+        ], contents)
+
+    def testSetDep_Submodules(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        with open(os.path.join(self.git_base, '.gclient'), 'w') as f:
+            f.write('')
+
+        fake_deps = os.path.join(self.git_base, 'repo_1', 'DEPS')
+        gitmodules = os.path.join(self.git_base, 'repo_1', '.gitmodules')
+        with open(fake_deps, 'w') as f:
+            f.write('\n'.join([
+                'git_dependencies = "SYNC"',
+                'vars = { ',
+                '  "foo_var": "foo_val",',
+                '  "foo_rev": "foo_rev",',
+                '}',
+                'deps = { ',
+                '  "repo_1/foo": "https://foo" + Var("foo_rev"),',
+                '  "repo_1/bar": "https://bar@barrev",',
+                '}',
+            ]))
+
+        with open(gitmodules, 'w') as f:
+            f.write('\n'.join([
+                '[submodule "foo"]', '  url = https://foo', '  path = foo',
+                '[submodule "bar"]', '  url = https://bar', '  path = bar'
+            ]))
+
+        subprocess2.call([
+            'git', 'update-index', '--add', '--cacheinfo',
+            '160000,aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,foo'
+        ],
+                         cwd=self.git_base + 'repo_1')
+        subprocess2.call([
+            'git', 'update-index', '--add', '--cacheinfo',
+            '160000,aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,bar'
+        ],
+                         cwd=self.git_base + 'repo_1')
+
+        self.gclient([
+            'setdep',
+            '-r',
+            'repo_1/foo@new_foo',
+            '--var',
+            'foo_var=new_val',
+            '-r',
+            'repo_1/bar@bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
+        ],
                      cwd=self.git_base + 'repo_1')
 
-    self.gclient([
-        'setdep',
-        '-r',
-        'repo_1/foo@new_foo',
-        '--var',
-        'foo_var=new_val',
-        '-r',
-        'repo_1/bar@bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb',
-    ],
-                 cwd=self.git_base + 'repo_1')
-
-    with open(fake_deps) as f:
-      contents = f.read().splitlines()
-
-    self.assertEqual([
-        'git_dependencies = "SYNC"',
-        'vars = { ',
-        '  "foo_var": "new_val",',
-        '  "foo_rev": "new_foo",',
-        '}',
-        'deps = { ',
-        '  "repo_1/foo": "https://foo" + Var("foo_rev"),',
-        '  "repo_1/bar": '
-        '"https://bar@bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",',
-        '}',
-    ], contents)
-
-  def testSetDep_Submodules_relative(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    fake_deps = os.path.join(self.git_base, 'repo_1', 'DEPS')
-    gitmodules = os.path.join(self.git_base, 'repo_1', '.gitmodules')
-    with open(fake_deps, 'w') as f:
-      f.write('\n'.join([
-          'git_dependencies = "SUBMODULES"',
-          'use_relative_paths = True',
-          'vars = { ',
-          '  "foo_var": "foo_val",',
-          '  "foo_rev": "foo_rev",',
-          '}',
-      ]))
-
-    with open(gitmodules, 'w') as f:
-      f.write('\n'.join(
-          ['[submodule "foo"]', '  url = https://foo', '  path = foo']))
-
-    subprocess2.call([
-        'git', 'update-index', '--add', '--cacheinfo',
-        '160000,aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,foo'
-    ],
+        with open(fake_deps) as f:
+            contents = f.read().splitlines()
+
+        self.assertEqual([
+            'git_dependencies = "SYNC"',
+            'vars = { ',
+            '  "foo_var": "new_val",',
+            '  "foo_rev": "new_foo",',
+            '}',
+            'deps = { ',
+            '  "repo_1/foo": "https://foo" + Var("foo_rev"),',
+            '  "repo_1/bar": '
+            '"https://bar@bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb",',
+            '}',
+        ], contents)
+
+    def testSetDep_Submodules_relative(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        fake_deps = os.path.join(self.git_base, 'repo_1', 'DEPS')
+        gitmodules = os.path.join(self.git_base, 'repo_1', '.gitmodules')
+        with open(fake_deps, 'w') as f:
+            f.write('\n'.join([
+                'git_dependencies = "SUBMODULES"',
+                'use_relative_paths = True',
+                'vars = { ',
+                '  "foo_var": "foo_val",',
+                '  "foo_rev": "foo_rev",',
+                '}',
+            ]))
+
+        with open(gitmodules, 'w') as f:
+            f.write('\n'.join(
+                ['[submodule "foo"]', '  url = https://foo', '  path = foo']))
+
+        subprocess2.call([
+            'git', 'update-index', '--add', '--cacheinfo',
+            '160000,aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,foo'
+        ],
+                         cwd=self.git_base + 'repo_1')
+
+        self.gclient([
+            'setdep', '-r', 'foo@new_foo', '--var', 'foo_var=new_val',
+            '--deps-file', fake_deps
+        ],
                      cwd=self.git_base + 'repo_1')
 
-    self.gclient([
-        'setdep', '-r', 'foo@new_foo', '--var', 'foo_var=new_val',
-        '--deps-file', fake_deps
-    ],
-                 cwd=self.git_base + 'repo_1')
-
-    with open(fake_deps) as f:
-      contents = f.read().splitlines()
-
-    self.assertEqual([
-        'git_dependencies = "SUBMODULES"',
-        'use_relative_paths = True',
-        'vars = { ',
-        '  "foo_var": "new_val",',
-        '  "foo_rev": "foo_rev",',
-        '}',
-    ], contents)
-
-  def testSetDep_BuiltinVariables(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'],
-                 cwd=self.git_base)
-
-    fake_deps = os.path.join(self.root_dir, 'DEPS')
-    with open(fake_deps, 'w') as f:
-      f.write('\n'.join([
-          'vars = { ',
-          '  "foo_var": "foo_val",',
-          '  "foo_rev": "foo_rev",',
-          '}',
-          'deps = {',
-          '  "foo": {',
-          '    "url": "url@{foo_rev}",',
-          '  },',
-          '  "bar": "url@bar_rev",',
-          '}',
-          'hooks = [{',
-          '  "name": "uses_builtin_var",',
-          '  "pattern": ".",',
-          '  "action": ["python3", "fake.py",',
-          '             "--with-android={checkout_android}"],',
-          '}]',
-      ]))
-
-    self.gclient([
-        'setdep', '-r', 'foo@new_foo', '-r', 'bar@new_bar', '--var',
-        'foo_var=new_val', '--deps-file', fake_deps
-    ],
-                 cwd=self.git_base + 'repo_1')
-
-    with open(fake_deps) as f:
-      contents = f.read().splitlines()
-
-    self.assertEqual([
-          'vars = { ',
-          '  "foo_var": "new_val",',
-          '  "foo_rev": "new_foo",',
-          '}',
-          'deps = {',
-          '  "foo": {',
-          '    "url": "url@{foo_rev}",',
-          '  },',
-          '  "bar": "url@new_bar",',
-          '}',
-          'hooks = [{',
-          '  "name": "uses_builtin_var",',
-          '  "pattern": ".",',
-          '  "action": ["python3", "fake.py",',
-          '             "--with-android={checkout_android}"],',
-          '}]',
-    ], contents)
-
-  def testGetDep(self):
-    fake_deps = os.path.join(self.root_dir, 'DEPS.fake')
-    with open(fake_deps, 'w') as f:
-      f.write('\n'.join([
-          'vars = { ',
-          '  "foo_var": "foo_val",',
-          '  "foo_rev": "foo_rev",',
-          '}',
-          'deps = {',
-          '  "foo": {',
-          '    "url": "url@{foo_rev}",',
-          '  },',
-          '  "bar": "url@bar_rev",',
-          '}',
-      ]))
-
-    results = self.gclient([
-        'getdep', '-r', 'foo', '-r', 'bar','--var', 'foo_var',
-        '--deps-file', fake_deps])
-
-    self.assertEqual([
-        'foo_val',
-        'foo_rev',
-        'bar_rev',
-    ], results[0].splitlines())
-
-  def testGetDep_Submodule(self):
-    self.gclient(['config', self.git_base + 'repo_20', '--name', 'src'])
-    subprocess2.call([
-        'git', 'update-index', '--add', '--cacheinfo',
-        '160000,aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,bar'
-    ],
-                     cwd=self.git_base + 'repo_20')
+        with open(fake_deps) as f:
+            contents = f.read().splitlines()
+
+        self.assertEqual([
+            'git_dependencies = "SUBMODULES"',
+            'use_relative_paths = True',
+            'vars = { ',
+            '  "foo_var": "new_val",',
+            '  "foo_rev": "foo_rev",',
+            '}',
+        ], contents)
+
+    def testSetDep_BuiltinVariables(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'],
+                     cwd=self.git_base)
+
+        fake_deps = os.path.join(self.root_dir, 'DEPS')
+        with open(fake_deps, 'w') as f:
+            f.write('\n'.join([
+                'vars = { ',
+                '  "foo_var": "foo_val",',
+                '  "foo_rev": "foo_rev",',
+                '}',
+                'deps = {',
+                '  "foo": {',
+                '    "url": "url@{foo_rev}",',
+                '  },',
+                '  "bar": "url@bar_rev",',
+                '}',
+                'hooks = [{',
+                '  "name": "uses_builtin_var",',
+                '  "pattern": ".",',
+                '  "action": ["python3", "fake.py",',
+                '             "--with-android={checkout_android}"],',
+                '}]',
+            ]))
+
+        self.gclient([
+            'setdep', '-r', 'foo@new_foo', '-r', 'bar@new_bar', '--var',
+            'foo_var=new_val', '--deps-file', fake_deps
+        ],
+                     cwd=self.git_base + 'repo_1')
+
+        with open(fake_deps) as f:
+            contents = f.read().splitlines()
+
+        self.assertEqual([
+            'vars = { ',
+            '  "foo_var": "new_val",',
+            '  "foo_rev": "new_foo",',
+            '}',
+            'deps = {',
+            '  "foo": {',
+            '    "url": "url@{foo_rev}",',
+            '  },',
+            '  "bar": "url@new_bar",',
+            '}',
+            'hooks = [{',
+            '  "name": "uses_builtin_var",',
+            '  "pattern": ".",',
+            '  "action": ["python3", "fake.py",',
+            '             "--with-android={checkout_android}"],',
+            '}]',
+        ], contents)
+
+    def testGetDep(self):
+        fake_deps = os.path.join(self.root_dir, 'DEPS.fake')
+        with open(fake_deps, 'w') as f:
+            f.write('\n'.join([
+                'vars = { ',
+                '  "foo_var": "foo_val",',
+                '  "foo_rev": "foo_rev",',
+                '}',
+                'deps = {',
+                '  "foo": {',
+                '    "url": "url@{foo_rev}",',
+                '  },',
+                '  "bar": "url@bar_rev",',
+                '}',
+            ]))
+
+        results = self.gclient([
+            'getdep', '-r', 'foo', '-r', 'bar', '--var', 'foo_var',
+            '--deps-file', fake_deps
+        ])
+
+        self.assertEqual([
+            'foo_val',
+            'foo_rev',
+            'bar_rev',
+        ], results[0].splitlines())
+
+    def testGetDep_Submodule(self):
+        self.gclient(['config', self.git_base + 'repo_20', '--name', 'src'])
+        subprocess2.call([
+            'git', 'update-index', '--add', '--cacheinfo',
+            '160000,aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,bar'
+        ],
+                         cwd=self.git_base + 'repo_20')
+
+        results = self.gclient([
+            'getdep', '-r', 'foo/bar:lemur', '-r', 'bar', '--var',
+            'foo_checkout'
+        ],
+                               cwd=self.git_base + 'repo_20')
+
+        self.assertEqual([
+            'True', 'version:1234', 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
+        ], results[0].splitlines())
+
+    def testGetDep_BuiltinVariables(self):
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+        fake_deps = os.path.join(self.root_dir, 'DEPS.fake')
+        with open(fake_deps, 'w') as f:
+            f.write('\n'.join([
+                'vars = { ',
+                '  "foo_var": "foo_val",',
+                '  "foo_rev": "foo_rev",',
+                '}',
+                'deps = {',
+                '  "foo": {',
+                '    "url": "url@{foo_rev}",',
+                '  },',
+                '  "bar": "url@bar_rev",',
+                '}',
+                'hooks = [{',
+                '  "name": "uses_builtin_var",',
+                '  "pattern": ".",',
+                '  "action": ["python3", "fake.py",',
+                '             "--with-android={checkout_android}"],',
+                '}]',
+            ]))
+
+        results = self.gclient([
+            'getdep', '-r', 'foo', '-r', 'bar', '--var', 'foo_var',
+            '--deps-file', fake_deps
+        ])
+
+        self.assertEqual([
+            'foo_val',
+            'foo_rev',
+            'bar_rev',
+        ], results[0].splitlines())
+
+    # TODO(crbug.com/1024683): Enable for windows.
+    @unittest.skipIf(sys.platform == 'win32', 'not yet fixed on win')
+    def testFlatten(self):
+        output_deps = os.path.join(self.root_dir, 'DEPS.flattened')
+        self.assertFalse(os.path.exists(output_deps))
+
+        self.gclient([
+            'config',
+            self.git_base + 'repo_6',
+            '--name',
+            'src',
+            # This should be ignored because 'custom_true_var' isn't
+            # defined in the DEPS.
+            '--custom-var',
+            'custom_true_var=True',
+            # This should override 'true_var=True' from the DEPS.
+            '--custom-var',
+            'true_var="False"'
+        ])
+        self.gclient(['sync'])
+        self.gclient(
+            ['flatten', '-v', '-v', '-v', '--output-deps', output_deps])
+
+        # Assert we can sync to the flattened DEPS we just wrote.
+        solutions = [{
+            "url": self.git_base + 'repo_6',
+            'name': 'src',
+            'deps_file': output_deps
+        }]
+        self.gclient(['sync', '--spec=solutions=%s' % solutions])
+
+        with open(output_deps) as f:
+            deps_contents = f.read()
+
+        self.assertEqual([
+            'gclient_gn_args_file = "src/repo2/gclient.args"',
+            'gclient_gn_args = [\'false_var\', \'false_str_var\', \'true_var\', '
+            '\'true_str_var\', \'str_var\', \'cond_var\']',
+            'allowed_hosts = [',
+            '  "' + self.git_base + '",',
+            ']',
+            '',
+            'deps = {',
+            '  # "src" -> "src/repo2" -> "foo/bar"',
+            '  "foo/bar": {',
+            '    "url": "' + self.git_base + 'repo_3",',
+            '    "condition": \'(repo2_false_var) and (true_str_var)\',',
+            '  },',
+            '',
+            '  # "src"',
+            '  "src": {',
+            '    "url": "' + self.git_base + 'repo_6",',
+            '  },',
+            '',
+            '  # "src" -> "src/mac_repo"',
+            '  "src/mac_repo": {',
+            '    "url": "' + self.git_base + 'repo_5",',
+            '    "condition": \'checkout_mac\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo8" -> "src/recursed_os_repo"',
+            '  "src/recursed_os_repo": {',
+            '    "url": "' + self.git_base + 'repo_5",',
+            '    "condition": \'(checkout_linux) or (checkout_mac)\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo15"',
+            '  "src/repo15": {',
+            '    "url": "' + self.git_base + 'repo_15",',
+            '  },',
+            '',
+            '  # "src" -> "src/repo16"',
+            '  "src/repo16": {',
+            '    "url": "' + self.git_base + 'repo_16",',
+            '  },',
+            '',
+            '  # "src" -> "src/repo2"',
+            '  "src/repo2": {',
+            '    "url": "' + self.git_base + 'repo_2@%s",' %
+            (self.githash('repo_2', 1)[:7]),
+            '    "condition": \'true_str_var\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo4"',
+            '  "src/repo4": {',
+            '    "url": "' + self.git_base + 'repo_4",',
+            '    "condition": \'False\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo8"',
+            '  "src/repo8": {',
+            '    "url": "' + self.git_base + 'repo_8",',
+            '  },',
+            '',
+            '  # "src" -> "src/unix_repo"',
+            '  "src/unix_repo": {',
+            '    "url": "' + self.git_base + 'repo_5",',
+            '    "condition": \'checkout_linux\',',
+            '  },',
+            '',
+            '  # "src" -> "src/win_repo"',
+            '  "src/win_repo": {',
+            '    "url": "' + self.git_base + 'repo_5",',
+            '    "condition": \'checkout_win\',',
+            '  },',
+            '',
+            '}',
+            '',
+            'hooks = [',
+            '  # "src"',
+            '  {',
+            '    "pattern": ".",',
+            '    "condition": \'True\',',
+            '    "cwd": ".",',
+            '    "action": [',
+            '        "python3",',
+            '        "-c",',
+            '        "open(\'src/git_hooked1\', \'w\')'
+            '.write(\'git_hooked1\')",',
+            '    ]',
+            '  },',
+            '',
+            '  # "src"',
+            '  {',
+            '    "pattern": "nonexistent",',
+            '    "cwd": ".",',
+            '    "action": [',
+            '        "python3",',
+            '        "-c",',
+            '        "open(\'src/git_hooked2\', \'w\').write(\'git_hooked2\')",',
+            '    ]',
+            '  },',
+            '',
+            '  # "src"',
+            '  {',
+            '    "pattern": ".",',
+            '    "condition": \'checkout_mac\',',
+            '    "cwd": ".",',
+            '    "action": [',
+            '        "python3",',
+            '        "-c",',
+            '        "open(\'src/git_hooked_mac\', \'w\').write('
+            '\'git_hooked_mac\')",',
+            '    ]',
+            '  },',
+            '',
+            '  # "src" -> "src/repo15"',
+            '  {',
+            '    "name": "absolute_cwd",',
+            '    "pattern": ".",',
+            '    "cwd": ".",',
+            '    "action": [',
+            '        "python3",',
+            '        "-c",',
+            '        "pass",',
+            '    ]',
+            '  },',
+            '',
+            '  # "src" -> "src/repo16"',
+            '  {',
+            '    "name": "relative_cwd",',
+            '    "pattern": ".",',
+            '    "cwd": "src/repo16",',
+            '    "action": [',
+            '        "python3",',
+            '        "relative.py",',
+            '    ]',
+            '  },',
+            '',
+            ']',
+            '',
+            'vars = {',
+            '  # "src"',
+            '  "DummyVariable": \'repo\',',
+            '',
+            '  # "src"',
+            '  "cond_var": \'false_str_var and true_var\',',
+            '',
+            '  # "src"',
+            '  "false_str_var": \'False\',',
+            '',
+            '  # "src"',
+            '  "false_var": False,',
+            '',
+            '  # "src"',
+            '  "git_base": \'' + self.git_base + '\',',
+            '',
+            '  # "src"',
+            '  "hook1_contents": \'git_hooked1\',',
+            '',
+            '  # "src" -> "src/repo2"',
+            '  "repo2_false_var": \'False\',',
+            '',
+            '  # "src"',
+            '  "repo5_var": \'/repo_5\',',
+            '',
+            '  # "src"',
+            '  "str_var": \'abc\',',
+            '',
+            '  # "src"',
+            '  "true_str_var": \'True\',',
+            '',
+            '  # "src" [custom_var override]',
+            '  "true_var": \'False\',',
+            '',
+            '}',
+            '',
+            '# ' + self.git_base + 'repo_15, DEPS',
+            '# ' + self.git_base + 'repo_16, DEPS',
+            '# ' + self.git_base + 'repo_2@%s, DEPS' %
+            (self.githash('repo_2', 1)[:7]),
+            '# ' + self.git_base + 'repo_6, DEPS',
+            '# ' + self.git_base + 'repo_8, DEPS',
+        ], deps_contents.splitlines())
+
+    # TODO(crbug.com/1024683): Enable for windows.
+    @unittest.skipIf(sys.platform == 'win32', 'not yet fixed on win')
+    def testFlattenPinAllDeps(self):
+        output_deps = os.path.join(self.root_dir, 'DEPS.flattened')
+        self.assertFalse(os.path.exists(output_deps))
+
+        self.gclient(['config', self.git_base + 'repo_6', '--name', 'src'])
+        self.gclient(['sync', '--process-all-deps'])
+        self.gclient([
+            'flatten', '-v', '-v', '-v', '--output-deps', output_deps,
+            '--pin-all-deps'
+        ])
+
+        with open(output_deps) as f:
+            deps_contents = f.read()
+
+        self.assertEqual([
+            'gclient_gn_args_file = "src/repo2/gclient.args"',
+            'gclient_gn_args = [\'false_var\', \'false_str_var\', \'true_var\', '
+            '\'true_str_var\', \'str_var\', \'cond_var\']',
+            'allowed_hosts = [',
+            '  "' + self.git_base + '",',
+            ']',
+            '',
+            'deps = {',
+            '  # "src" -> "src/repo2" -> "foo/bar"',
+            '  "foo/bar": {',
+            '    "url": "' + self.git_base + 'repo_3@%s",' %
+            (self.githash('repo_3', 2)),
+            '    "condition": \'(repo2_false_var) and (true_str_var)\',',
+            '  },',
+            '',
+            '  # "src"',
+            '  "src": {',
+            '    "url": "' + self.git_base + 'repo_6@%s",' %
+            (self.githash('repo_6', 1)),
+            '  },',
+            '',
+            '  # "src" -> "src/mac_repo"',
+            '  "src/mac_repo": {',
+            '    "url": "' + self.git_base + 'repo_5@%s",' %
+            (self.githash('repo_5', 3)),
+            '    "condition": \'checkout_mac\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo8" -> "src/recursed_os_repo"',
+            '  "src/recursed_os_repo": {',
+            '    "url": "' + self.git_base + 'repo_5@%s",' %
+            (self.githash('repo_5', 3)),
+            '    "condition": \'(checkout_linux) or (checkout_mac)\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo15"',
+            '  "src/repo15": {',
+            '    "url": "' + self.git_base + 'repo_15@%s",' %
+            (self.githash('repo_15', 1)),
+            '  },',
+            '',
+            '  # "src" -> "src/repo16"',
+            '  "src/repo16": {',
+            '    "url": "' + self.git_base + 'repo_16@%s",' %
+            (self.githash('repo_16', 1)),
+            '  },',
+            '',
+            '  # "src" -> "src/repo2"',
+            '  "src/repo2": {',
+            '    "url": "' + self.git_base + 'repo_2@%s",' %
+            (self.githash('repo_2', 1)),
+            '    "condition": \'true_str_var\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo4"',
+            '  "src/repo4": {',
+            '    "url": "' + self.git_base + 'repo_4@%s",' %
+            (self.githash('repo_4', 2)),
+            '    "condition": \'False\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo8"',
+            '  "src/repo8": {',
+            '    "url": "' + self.git_base + 'repo_8@%s",' %
+            (self.githash('repo_8', 1)),
+            '  },',
+            '',
+            '  # "src" -> "src/unix_repo"',
+            '  "src/unix_repo": {',
+            '    "url": "' + self.git_base + 'repo_5@%s",' %
+            (self.githash('repo_5', 3)),
+            '    "condition": \'checkout_linux\',',
+            '  },',
+            '',
+            '  # "src" -> "src/win_repo"',
+            '  "src/win_repo": {',
+            '    "url": "' + self.git_base + 'repo_5@%s",' %
+            (self.githash('repo_5', 3)),
+            '    "condition": \'checkout_win\',',
+            '  },',
+            '',
+            '}',
+            '',
+            'hooks = [',
+            '  # "src"',
+            '  {',
+            '    "pattern": ".",',
+            '    "condition": \'True\',',
+            '    "cwd": ".",',
+            '    "action": [',
+            '        "python3",',
+            '        "-c",',
+            '        "open(\'src/git_hooked1\', \'w\')'
+            '.write(\'git_hooked1\')",',
+            '    ]',
+            '  },',
+            '',
+            '  # "src"',
+            '  {',
+            '    "pattern": "nonexistent",',
+            '    "cwd": ".",',
+            '    "action": [',
+            '        "python3",',
+            '        "-c",',
+            '        "open(\'src/git_hooked2\', \'w\').write(\'git_hooked2\')",',
+            '    ]',
+            '  },',
+            '',
+            '  # "src"',
+            '  {',
+            '    "pattern": ".",',
+            '    "condition": \'checkout_mac\',',
+            '    "cwd": ".",',
+            '    "action": [',
+            '        "python3",',
+            '        "-c",',
+            '        "open(\'src/git_hooked_mac\', \'w\').write('
+            '\'git_hooked_mac\')",',
+            '    ]',
+            '  },',
+            '',
+            '  # "src" -> "src/repo15"',
+            '  {',
+            '    "name": "absolute_cwd",',
+            '    "pattern": ".",',
+            '    "cwd": ".",',
+            '    "action": [',
+            '        "python3",',
+            '        "-c",',
+            '        "pass",',
+            '    ]',
+            '  },',
+            '',
+            '  # "src" -> "src/repo16"',
+            '  {',
+            '    "name": "relative_cwd",',
+            '    "pattern": ".",',
+            '    "cwd": "src/repo16",',
+            '    "action": [',
+            '        "python3",',
+            '        "relative.py",',
+            '    ]',
+            '  },',
+            '',
+            ']',
+            '',
+            'vars = {',
+            '  # "src"',
+            '  "DummyVariable": \'repo\',',
+            '',
+            '  # "src"',
+            '  "cond_var": \'false_str_var and true_var\',',
+            '',
+            '  # "src"',
+            '  "false_str_var": \'False\',',
+            '',
+            '  # "src"',
+            '  "false_var": False,',
+            '',
+            '  # "src"',
+            '  "git_base": \'' + self.git_base + '\',',
+            '',
+            '  # "src"',
+            '  "hook1_contents": \'git_hooked1\',',
+            '',
+            '  # "src" -> "src/repo2"',
+            '  "repo2_false_var": \'False\',',
+            '',
+            '  # "src"',
+            '  "repo5_var": \'/repo_5\',',
+            '',
+            '  # "src"',
+            '  "str_var": \'abc\',',
+            '',
+            '  # "src"',
+            '  "true_str_var": \'True\',',
+            '',
+            '  # "src"',
+            '  "true_var": True,',
+            '',
+            '}',
+            '',
+            '# ' + self.git_base + 'repo_15@%s, DEPS' %
+            (self.githash('repo_15', 1)),
+            '# ' + self.git_base + 'repo_16@%s, DEPS' %
+            (self.githash('repo_16', 1)),
+            '# ' + self.git_base + 'repo_2@%s, DEPS' %
+            (self.githash('repo_2', 1)),
+            '# ' + self.git_base + 'repo_6@%s, DEPS' %
+            (self.githash('repo_6', 1)),
+            '# ' + self.git_base + 'repo_8@%s, DEPS' %
+            (self.githash('repo_8', 1)),
+        ], deps_contents.splitlines())
+
+    # TODO(crbug.com/1024683): Enable for windows.
+    @unittest.skipIf(sys.platform == 'win32', 'not yet fixed on win')
+    def testFlattenRecursedeps(self):
+        output_deps = os.path.join(self.root_dir, 'DEPS.flattened')
+        self.assertFalse(os.path.exists(output_deps))
+
+        output_deps_files = os.path.join(self.root_dir, 'DEPS.files')
+        self.assertFalse(os.path.exists(output_deps_files))
+
+        self.gclient(['config', self.git_base + 'repo_10', '--name', 'src'])
+        self.gclient(['sync', '--process-all-deps'])
+        self.gclient([
+            'flatten', '-v', '-v', '-v', '--output-deps', output_deps,
+            '--output-deps-files', output_deps_files
+        ])
+
+        with open(output_deps) as f:
+            deps_contents = f.read()
+
+        self.assertEqual([
+            'gclient_gn_args_file = "src/repo8/gclient.args"',
+            "gclient_gn_args = ['str_var']",
+            'deps = {',
+            '  # "src"',
+            '  "src": {',
+            '    "url": "' + self.git_base + 'repo_10",',
+            '  },',
+            '',
+            '  # "src" -> "src/repo9" -> "src/repo8" -> "src/recursed_os_repo"',
+            '  "src/recursed_os_repo": {',
+            '    "url": "' + self.git_base + 'repo_5",',
+            '    "condition": \'(checkout_linux) or (checkout_mac)\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo11"',
+            '  "src/repo11": {',
+            '    "url": "' + self.git_base + 'repo_11",',
+            '    "condition": \'(checkout_ios) or (checkout_mac)\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo11" -> "src/repo12"',
+            '  "src/repo12": {',
+            '    "url": "' + self.git_base + 'repo_12",',
+            '    "condition": \'(checkout_ios) or (checkout_mac)\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo9" -> "src/repo4"',
+            '  "src/repo4": {',
+            '    "url": "' + self.git_base + 'repo_4",',
+            '    "condition": \'checkout_android\',',
+            '  },',
+            '',
+            '  # "src" -> "src/repo6"',
+            '  "src/repo6": {',
+            '    "url": "' + self.git_base + 'repo_6",',
+            '  },',
+            '',
+            '  # "src" -> "src/repo9" -> "src/repo7"',
+            '  "src/repo7": {',
+            '    "url": "' + self.git_base + 'repo_7",',
+            '  },',
+            '',
+            '  # "src" -> "src/repo9" -> "src/repo8"',
+            '  "src/repo8": {',
+            '    "url": "' + self.git_base + 'repo_8",',
+            '  },',
+            '',
+            '  # "src" -> "src/repo9"',
+            '  "src/repo9": {',
+            '    "url": "' + self.git_base + 'repo_9",',
+            '  },',
+            '',
+            '}',
+            '',
+            'vars = {',
+            '  # "src" -> "src/repo9"',
+            '  "str_var": \'xyz\',',
+            '',
+            '}',
+            '',
+            '# ' + self.git_base + 'repo_10, DEPS',
+            '# ' + self.git_base + 'repo_11, DEPS',
+            '# ' + self.git_base + 'repo_8, DEPS',
+            '# ' + self.git_base + 'repo_9, DEPS',
+        ], deps_contents.splitlines())
+
+        with open(output_deps_files) as f:
+            deps_files_contents = json.load(f)
+
+        self.assertEqual([
+            {
+                'url': self.git_base + 'repo_10',
+                'deps_file': 'DEPS',
+                'hierarchy': [['src', self.git_base + 'repo_10']]
+            },
+            {
+                'url':
+                self.git_base + 'repo_11',
+                'deps_file':
+                'DEPS',
+                'hierarchy': [['src', self.git_base + 'repo_10'],
+                              ['src/repo11', self.git_base + 'repo_11']]
+            },
+            {
+                'url':
+                self.git_base + 'repo_8',
+                'deps_file':
+                'DEPS',
+                'hierarchy': [['src', self.git_base + 'repo_10'],
+                              ['src/repo9', self.git_base + 'repo_9'],
+                              ['src/repo8', self.git_base + 'repo_8']]
+            },
+            {
+                'url':
+                self.git_base + 'repo_9',
+                'deps_file':
+                'DEPS',
+                'hierarchy': [['src', self.git_base + 'repo_10'],
+                              ['src/repo9', self.git_base + 'repo_9']]
+            },
+        ], deps_files_contents)
+
+    # TODO(crbug.com/1024683): Enable for windows.
+    @unittest.skipIf(sys.platform == 'win32', 'not yet fixed on win')
+    def testFlattenCipd(self):
+        output_deps = os.path.join(self.root_dir, 'DEPS.flattened')
+        self.assertFalse(os.path.exists(output_deps))
+
+        self.gclient(['config', self.git_base + 'repo_14', '--name', 'src'])
+        self.gclient(['sync'])
+        self.gclient(
+            ['flatten', '-v', '-v', '-v', '--output-deps', output_deps])
+
+        with open(output_deps) as f:
+            deps_contents = f.read()
+
+        self.assertEqual([
+            'deps = {',
+            '  # "src"',
+            '  "src": {',
+            '    "url": "' + self.git_base + 'repo_14",',
+            '  },',
+            '',
+            '  # "src" -> src/another_cipd_dep',
+            '  "src/another_cipd_dep": {',
+            '    "packages": [',
+            '      {',
+            '        "package": "package1",',
+            '        "version": "1.1-cr0",',
+            '      },',
+            '      {',
+            '        "package": "package2",',
+            '        "version": "1.13",',
+            '      },',
+            '    ],',
+            '    "dep_type": "cipd",',
+            '  },',
+            '',
+            '  # "src" -> src/cipd_dep',
+            '  "src/cipd_dep": {',
+            '    "packages": [',
+            '      {',
+            '        "package": "package0",',
+            '        "version": "0.1",',
+            '      },',
+            '    ],',
+            '    "dep_type": "cipd",',
+            '  },',
+            '',
+            '  # "src" -> src/cipd_dep_with_cipd_variable',
+            '  "src/cipd_dep_with_cipd_variable": {',
+            '    "packages": [',
+            '      {',
+            '        "package": "package3/${{platform}}",',
+            '        "version": "1.2",',
+            '      },',
+            '    ],',
+            '    "dep_type": "cipd",',
+            '  },',
+            '',
+            '}',
+            '',
+            '# ' + self.git_base + 'repo_14, DEPS',
+        ], deps_contents.splitlines())
+
+    def testRelativeGNArgsFile(self):
+        self.gclient(['config', self.git_base + 'repo_17', '--name', 'src'])
+        self.gclient([
+            'sync',
+        ])
+
+        tree = self.mangle_git_tree(('repo_17@1', 'src'))
+        tree['src/repo17_gclient.args'] = '\n'.join([
+            '# Generated from \'DEPS\'',
+            'toto = "tata"',
+        ])
+        self.assertTree(tree)
 
-    results = self.gclient(
-        ['getdep', '-r', 'foo/bar:lemur', '-r', 'bar', '--var', 'foo_checkout'],
-        cwd=self.git_base + 'repo_20')
-
-    self.assertEqual(
-        ['True', 'version:1234', 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'],
-        results[0].splitlines())
-
-  def testGetDep_BuiltinVariables(self):
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-    fake_deps = os.path.join(self.root_dir, 'DEPS.fake')
-    with open(fake_deps, 'w') as f:
-      f.write('\n'.join([
-          'vars = { ',
-          '  "foo_var": "foo_val",',
-          '  "foo_rev": "foo_rev",',
-          '}',
-          'deps = {',
-          '  "foo": {',
-          '    "url": "url@{foo_rev}",',
-          '  },',
-          '  "bar": "url@bar_rev",',
-          '}',
-          'hooks = [{',
-          '  "name": "uses_builtin_var",',
-          '  "pattern": ".",',
-          '  "action": ["python3", "fake.py",',
-          '             "--with-android={checkout_android}"],',
-          '}]',
-      ]))
-
-    results = self.gclient([
-        'getdep', '-r', 'foo', '-r', 'bar','--var', 'foo_var',
-        '--deps-file', fake_deps])
-
-    self.assertEqual([
-        'foo_val',
-        'foo_rev',
-        'bar_rev',
-    ], results[0].splitlines())
-
-  # TODO(crbug.com/1024683): Enable for windows.
-  @unittest.skipIf(sys.platform == 'win32', 'not yet fixed on win')
-  def testFlatten(self):
-    output_deps = os.path.join(self.root_dir, 'DEPS.flattened')
-    self.assertFalse(os.path.exists(output_deps))
-
-    self.gclient(['config', self.git_base + 'repo_6', '--name', 'src',
-                  # This should be ignored because 'custom_true_var' isn't
-                  # defined in the DEPS.
-                  '--custom-var', 'custom_true_var=True',
-                  # This should override 'true_var=True' from the DEPS.
-                  '--custom-var', 'true_var="False"'])
-    self.gclient(['sync'])
-    self.gclient(['flatten', '-v', '-v', '-v', '--output-deps', output_deps])
-
-    # Assert we can sync to the flattened DEPS we just wrote.
-    solutions = [{
-        "url": self.git_base + 'repo_6',
-        'name': 'src',
-        'deps_file': output_deps
-    }]
-    self.gclient([
-        'sync',
-        '--spec=solutions=%s' % solutions
-    ])
-
-    with open(output_deps) as f:
-      deps_contents = f.read()
-
-    self.assertEqual([
-        'gclient_gn_args_file = "src/repo2/gclient.args"',
-        'gclient_gn_args = [\'false_var\', \'false_str_var\', \'true_var\', '
-        '\'true_str_var\', \'str_var\', \'cond_var\']',
-        'allowed_hosts = [',
-        '  "' + self.git_base + '",',
-        ']',
-        '',
-        'deps = {',
-        '  # "src" -> "src/repo2" -> "foo/bar"',
-        '  "foo/bar": {',
-        '    "url": "' + self.git_base + 'repo_3",',
-        '    "condition": \'(repo2_false_var) and (true_str_var)\',',
-        '  },',
-        '',
-        '  # "src"',
-        '  "src": {',
-        '    "url": "' + self.git_base + 'repo_6",',
-        '  },',
-        '',
-        '  # "src" -> "src/mac_repo"',
-        '  "src/mac_repo": {',
-        '    "url": "' + self.git_base + 'repo_5",',
-        '    "condition": \'checkout_mac\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo8" -> "src/recursed_os_repo"',
-        '  "src/recursed_os_repo": {',
-        '    "url": "' + self.git_base + 'repo_5",',
-        '    "condition": \'(checkout_linux) or (checkout_mac)\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo15"',
-        '  "src/repo15": {',
-        '    "url": "' + self.git_base + 'repo_15",',
-        '  },',
-        '',
-        '  # "src" -> "src/repo16"',
-        '  "src/repo16": {',
-        '    "url": "' + self.git_base + 'repo_16",',
-        '  },',
-        '',
-        '  # "src" -> "src/repo2"',
-        '  "src/repo2": {',
-        '    "url": "' + self.git_base + 'repo_2@%s",' %
-        (self.githash('repo_2', 1)[:7]),
-        '    "condition": \'true_str_var\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo4"',
-        '  "src/repo4": {',
-        '    "url": "' + self.git_base + 'repo_4",',
-        '    "condition": \'False\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo8"',
-        '  "src/repo8": {',
-        '    "url": "' + self.git_base + 'repo_8",',
-        '  },',
-        '',
-        '  # "src" -> "src/unix_repo"',
-        '  "src/unix_repo": {',
-        '    "url": "' + self.git_base + 'repo_5",',
-        '    "condition": \'checkout_linux\',',
-        '  },',
-        '',
-        '  # "src" -> "src/win_repo"',
-        '  "src/win_repo": {',
-        '    "url": "' + self.git_base + 'repo_5",',
-        '    "condition": \'checkout_win\',',
-        '  },',
-        '',
-        '}',
-        '',
-        'hooks = [',
-        '  # "src"',
-        '  {',
-        '    "pattern": ".",',
-        '    "condition": \'True\',',
-        '    "cwd": ".",',
-        '    "action": [',
-        '        "python3",',
-        '        "-c",',
-        '        "open(\'src/git_hooked1\', \'w\')'
-        '.write(\'git_hooked1\')",',
-        '    ]',
-        '  },',
-        '',
-        '  # "src"',
-        '  {',
-        '    "pattern": "nonexistent",',
-        '    "cwd": ".",',
-        '    "action": [',
-        '        "python3",',
-        '        "-c",',
-        '        "open(\'src/git_hooked2\', \'w\').write(\'git_hooked2\')",',
-        '    ]',
-        '  },',
-        '',
-        '  # "src"',
-        '  {',
-        '    "pattern": ".",',
-        '    "condition": \'checkout_mac\',',
-        '    "cwd": ".",',
-        '    "action": [',
-        '        "python3",',
-        '        "-c",',
-        '        "open(\'src/git_hooked_mac\', \'w\').write('
-        '\'git_hooked_mac\')",',
-        '    ]',
-        '  },',
-        '',
-        '  # "src" -> "src/repo15"',
-        '  {',
-        '    "name": "absolute_cwd",',
-        '    "pattern": ".",',
-        '    "cwd": ".",',
-        '    "action": [',
-        '        "python3",',
-        '        "-c",',
-        '        "pass",',
-        '    ]',
-        '  },',
-        '',
-        '  # "src" -> "src/repo16"',
-        '  {',
-        '    "name": "relative_cwd",',
-        '    "pattern": ".",',
-        '    "cwd": "src/repo16",',
-        '    "action": [',
-        '        "python3",',
-        '        "relative.py",',
-        '    ]',
-        '  },',
-        '',
-        ']',
-        '',
-        'vars = {',
-        '  # "src"',
-        '  "DummyVariable": \'repo\',',
-        '',
-        '  # "src"',
-        '  "cond_var": \'false_str_var and true_var\',',
-        '',
-        '  # "src"',
-        '  "false_str_var": \'False\',',
-        '',
-        '  # "src"',
-        '  "false_var": False,',
-        '',
-        '  # "src"',
-        '  "git_base": \'' + self.git_base + '\',',
-        '',
-        '  # "src"',
-        '  "hook1_contents": \'git_hooked1\',',
-        '',
-        '  # "src" -> "src/repo2"',
-        '  "repo2_false_var": \'False\',',
-        '',
-        '  # "src"',
-        '  "repo5_var": \'/repo_5\',',
-        '',
-        '  # "src"',
-        '  "str_var": \'abc\',',
-        '',
-        '  # "src"',
-        '  "true_str_var": \'True\',',
-        '',
-        '  # "src" [custom_var override]',
-        '  "true_var": \'False\',',
-        '',
-        '}',
-        '',
-        '# ' + self.git_base + 'repo_15, DEPS',
-        '# ' + self.git_base + 'repo_16, DEPS',
-        '# ' + self.git_base + 'repo_2@%s, DEPS' %
-        (self.githash('repo_2', 1)[:7]),
-        '# ' + self.git_base + 'repo_6, DEPS',
-        '# ' + self.git_base + 'repo_8, DEPS',
-    ], deps_contents.splitlines())
-
-  # TODO(crbug.com/1024683): Enable for windows.
-  @unittest.skipIf(sys.platform == 'win32', 'not yet fixed on win')
-  def testFlattenPinAllDeps(self):
-    output_deps = os.path.join(self.root_dir, 'DEPS.flattened')
-    self.assertFalse(os.path.exists(output_deps))
-
-    self.gclient(['config', self.git_base + 'repo_6', '--name', 'src'])
-    self.gclient(['sync', '--process-all-deps'])
-    self.gclient(['flatten', '-v', '-v', '-v', '--output-deps', output_deps,
-                  '--pin-all-deps'])
-
-    with open(output_deps) as f:
-      deps_contents = f.read()
-
-    self.assertEqual([
-        'gclient_gn_args_file = "src/repo2/gclient.args"',
-        'gclient_gn_args = [\'false_var\', \'false_str_var\', \'true_var\', '
-        '\'true_str_var\', \'str_var\', \'cond_var\']',
-        'allowed_hosts = [',
-        '  "' + self.git_base + '",',
-        ']',
-        '',
-        'deps = {',
-        '  # "src" -> "src/repo2" -> "foo/bar"',
-        '  "foo/bar": {',
-        '    "url": "' + self.git_base + 'repo_3@%s",' %
-        (self.githash('repo_3', 2)),
-        '    "condition": \'(repo2_false_var) and (true_str_var)\',',
-        '  },',
-        '',
-        '  # "src"',
-        '  "src": {',
-        '    "url": "' + self.git_base + 'repo_6@%s",' %
-        (self.githash('repo_6', 1)),
-        '  },',
-        '',
-        '  # "src" -> "src/mac_repo"',
-        '  "src/mac_repo": {',
-        '    "url": "' + self.git_base + 'repo_5@%s",' %
-        (self.githash('repo_5', 3)),
-        '    "condition": \'checkout_mac\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo8" -> "src/recursed_os_repo"',
-        '  "src/recursed_os_repo": {',
-        '    "url": "' + self.git_base + 'repo_5@%s",' %
-        (self.githash('repo_5', 3)),
-        '    "condition": \'(checkout_linux) or (checkout_mac)\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo15"',
-        '  "src/repo15": {',
-        '    "url": "' + self.git_base + 'repo_15@%s",' %
-        (self.githash('repo_15', 1)),
-        '  },',
-        '',
-        '  # "src" -> "src/repo16"',
-        '  "src/repo16": {',
-        '    "url": "' + self.git_base + 'repo_16@%s",' %
-        (self.githash('repo_16', 1)),
-        '  },',
-        '',
-        '  # "src" -> "src/repo2"',
-        '  "src/repo2": {',
-        '    "url": "' + self.git_base + 'repo_2@%s",' %
-        (self.githash('repo_2', 1)),
-        '    "condition": \'true_str_var\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo4"',
-        '  "src/repo4": {',
-        '    "url": "' + self.git_base + 'repo_4@%s",' %
-        (self.githash('repo_4', 2)),
-        '    "condition": \'False\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo8"',
-        '  "src/repo8": {',
-        '    "url": "' + self.git_base + 'repo_8@%s",' %
-        (self.githash('repo_8', 1)),
-        '  },',
-        '',
-        '  # "src" -> "src/unix_repo"',
-        '  "src/unix_repo": {',
-        '    "url": "' + self.git_base + 'repo_5@%s",' %
-        (self.githash('repo_5', 3)),
-        '    "condition": \'checkout_linux\',',
-        '  },',
-        '',
-        '  # "src" -> "src/win_repo"',
-        '  "src/win_repo": {',
-        '    "url": "' + self.git_base + 'repo_5@%s",' %
-        (self.githash('repo_5', 3)),
-        '    "condition": \'checkout_win\',',
-        '  },',
-        '',
-        '}',
-        '',
-        'hooks = [',
-        '  # "src"',
-        '  {',
-        '    "pattern": ".",',
-        '    "condition": \'True\',',
-        '    "cwd": ".",',
-        '    "action": [',
-        '        "python3",',
-        '        "-c",',
-        '        "open(\'src/git_hooked1\', \'w\')'
-        '.write(\'git_hooked1\')",',
-        '    ]',
-        '  },',
-        '',
-        '  # "src"',
-        '  {',
-        '    "pattern": "nonexistent",',
-        '    "cwd": ".",',
-        '    "action": [',
-        '        "python3",',
-        '        "-c",',
-        '        "open(\'src/git_hooked2\', \'w\').write(\'git_hooked2\')",',
-        '    ]',
-        '  },',
-        '',
-        '  # "src"',
-        '  {',
-        '    "pattern": ".",',
-        '    "condition": \'checkout_mac\',',
-        '    "cwd": ".",',
-        '    "action": [',
-        '        "python3",',
-        '        "-c",',
-        '        "open(\'src/git_hooked_mac\', \'w\').write('
-        '\'git_hooked_mac\')",',
-        '    ]',
-        '  },',
-        '',
-        '  # "src" -> "src/repo15"',
-        '  {',
-        '    "name": "absolute_cwd",',
-        '    "pattern": ".",',
-        '    "cwd": ".",',
-        '    "action": [',
-        '        "python3",',
-        '        "-c",',
-        '        "pass",',
-        '    ]',
-        '  },',
-        '',
-        '  # "src" -> "src/repo16"',
-        '  {',
-        '    "name": "relative_cwd",',
-        '    "pattern": ".",',
-        '    "cwd": "src/repo16",',
-        '    "action": [',
-        '        "python3",',
-        '        "relative.py",',
-        '    ]',
-        '  },',
-        '',
-        ']',
-        '',
-        'vars = {',
-        '  # "src"',
-        '  "DummyVariable": \'repo\',',
-        '',
-        '  # "src"',
-        '  "cond_var": \'false_str_var and true_var\',',
-        '',
-        '  # "src"',
-        '  "false_str_var": \'False\',',
-        '',
-        '  # "src"',
-        '  "false_var": False,',
-        '',
-        '  # "src"',
-        '  "git_base": \'' + self.git_base + '\',',
-        '',
-        '  # "src"',
-        '  "hook1_contents": \'git_hooked1\',',
-        '',
-        '  # "src" -> "src/repo2"',
-        '  "repo2_false_var": \'False\',',
-        '',
-        '  # "src"',
-        '  "repo5_var": \'/repo_5\',',
-        '',
-        '  # "src"',
-        '  "str_var": \'abc\',',
-        '',
-        '  # "src"',
-        '  "true_str_var": \'True\',',
-        '',
-        '  # "src"',
-        '  "true_var": True,',
-        '',
-        '}',
-        '',
-        '# ' + self.git_base + 'repo_15@%s, DEPS' %
-        (self.githash('repo_15', 1)),
-        '# ' + self.git_base + 'repo_16@%s, DEPS' %
-        (self.githash('repo_16', 1)),
-        '# ' + self.git_base + 'repo_2@%s, DEPS' % (self.githash('repo_2', 1)),
-        '# ' + self.git_base + 'repo_6@%s, DEPS' % (self.githash('repo_6', 1)),
-        '# ' + self.git_base + 'repo_8@%s, DEPS' % (self.githash('repo_8', 1)),
-    ], deps_contents.splitlines())
-
-  # TODO(crbug.com/1024683): Enable for windows.
-  @unittest.skipIf(sys.platform == 'win32', 'not yet fixed on win')
-  def testFlattenRecursedeps(self):
-    output_deps = os.path.join(self.root_dir, 'DEPS.flattened')
-    self.assertFalse(os.path.exists(output_deps))
-
-    output_deps_files = os.path.join(self.root_dir, 'DEPS.files')
-    self.assertFalse(os.path.exists(output_deps_files))
-
-    self.gclient(['config', self.git_base + 'repo_10', '--name', 'src'])
-    self.gclient(['sync', '--process-all-deps'])
-    self.gclient(['flatten', '-v', '-v', '-v',
-                  '--output-deps', output_deps,
-                  '--output-deps-files', output_deps_files])
-
-    with open(output_deps) as f:
-      deps_contents = f.read()
-
-    self.assertEqual([
-        'gclient_gn_args_file = "src/repo8/gclient.args"',
-        "gclient_gn_args = ['str_var']",
-        'deps = {',
-        '  # "src"',
-        '  "src": {',
-        '    "url": "' + self.git_base + 'repo_10",',
-        '  },',
-        '',
-        '  # "src" -> "src/repo9" -> "src/repo8" -> "src/recursed_os_repo"',
-        '  "src/recursed_os_repo": {',
-        '    "url": "' + self.git_base + 'repo_5",',
-        '    "condition": \'(checkout_linux) or (checkout_mac)\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo11"',
-        '  "src/repo11": {',
-        '    "url": "' + self.git_base + 'repo_11",',
-        '    "condition": \'(checkout_ios) or (checkout_mac)\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo11" -> "src/repo12"',
-        '  "src/repo12": {',
-        '    "url": "' + self.git_base + 'repo_12",',
-        '    "condition": \'(checkout_ios) or (checkout_mac)\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo9" -> "src/repo4"',
-        '  "src/repo4": {',
-        '    "url": "' + self.git_base + 'repo_4",',
-        '    "condition": \'checkout_android\',',
-        '  },',
-        '',
-        '  # "src" -> "src/repo6"',
-        '  "src/repo6": {',
-        '    "url": "' + self.git_base + 'repo_6",',
-        '  },',
-        '',
-        '  # "src" -> "src/repo9" -> "src/repo7"',
-        '  "src/repo7": {',
-        '    "url": "' + self.git_base + 'repo_7",',
-        '  },',
-        '',
-        '  # "src" -> "src/repo9" -> "src/repo8"',
-        '  "src/repo8": {',
-        '    "url": "' + self.git_base + 'repo_8",',
-        '  },',
-        '',
-        '  # "src" -> "src/repo9"',
-        '  "src/repo9": {',
-        '    "url": "' + self.git_base + 'repo_9",',
-        '  },',
-        '',
-        '}',
-        '',
-        'vars = {',
-        '  # "src" -> "src/repo9"',
-        '  "str_var": \'xyz\',',
-        '',
-        '}',
-        '',
-        '# ' + self.git_base + 'repo_10, DEPS',
-        '# ' + self.git_base + 'repo_11, DEPS',
-        '# ' + self.git_base + 'repo_8, DEPS',
-        '# ' + self.git_base + 'repo_9, DEPS',
-    ], deps_contents.splitlines())
-
-    with open(output_deps_files) as f:
-      deps_files_contents = json.load(f)
-
-    self.assertEqual([
-      {'url': self.git_base + 'repo_10', 'deps_file': 'DEPS',
-       'hierarchy': [['src', self.git_base + 'repo_10']]},
-      {'url': self.git_base + 'repo_11', 'deps_file': 'DEPS',
-       'hierarchy': [['src', self.git_base + 'repo_10'],
-                     ['src/repo11', self.git_base + 'repo_11']]},
-      {'url': self.git_base + 'repo_8', 'deps_file': 'DEPS',
-       'hierarchy': [['src', self.git_base + 'repo_10'],
-                     ['src/repo9', self.git_base + 'repo_9'],
-                     ['src/repo8', self.git_base + 'repo_8']]},
-      {'url': self.git_base + 'repo_9', 'deps_file': 'DEPS',
-       'hierarchy': [['src', self.git_base + 'repo_10'],
-                     ['src/repo9', self.git_base + 'repo_9']]},
-    ], deps_files_contents)
-
-  # TODO(crbug.com/1024683): Enable for windows.
-  @unittest.skipIf(sys.platform == 'win32', 'not yet fixed on win')
-  def testFlattenCipd(self):
-    output_deps = os.path.join(self.root_dir, 'DEPS.flattened')
-    self.assertFalse(os.path.exists(output_deps))
-
-    self.gclient(['config', self.git_base + 'repo_14', '--name', 'src'])
-    self.gclient(['sync'])
-    self.gclient(['flatten', '-v', '-v', '-v', '--output-deps', output_deps])
-
-    with open(output_deps) as f:
-      deps_contents = f.read()
-
-    self.assertEqual([
-        'deps = {',
-        '  # "src"',
-        '  "src": {',
-        '    "url": "' + self.git_base + 'repo_14",',
-        '  },',
-        '',
-        '  # "src" -> src/another_cipd_dep',
-        '  "src/another_cipd_dep": {',
-        '    "packages": [',
-        '      {',
-        '        "package": "package1",',
-        '        "version": "1.1-cr0",',
-        '      },',
-        '      {',
-        '        "package": "package2",',
-        '        "version": "1.13",',
-        '      },',
-        '    ],',
-        '    "dep_type": "cipd",',
-        '  },',
-        '',
-        '  # "src" -> src/cipd_dep',
-        '  "src/cipd_dep": {',
-        '    "packages": [',
-        '      {',
-        '        "package": "package0",',
-        '        "version": "0.1",',
-        '      },',
-        '    ],',
-        '    "dep_type": "cipd",',
-        '  },',
-        '',
-        '  # "src" -> src/cipd_dep_with_cipd_variable',
-        '  "src/cipd_dep_with_cipd_variable": {',
-        '    "packages": [',
-        '      {',
-        '        "package": "package3/${{platform}}",',
-        '        "version": "1.2",',
-        '      },',
-        '    ],',
-        '    "dep_type": "cipd",',
-        '  },',
-        '',
-        '}',
-        '',
-        '# ' + self.git_base + 'repo_14, DEPS',
-    ], deps_contents.splitlines())
-
-  def testRelativeGNArgsFile(self):
-    self.gclient(['config', self.git_base + 'repo_17', '--name', 'src'])
-    self.gclient(['sync',])
-
-    tree = self.mangle_git_tree(('repo_17@1', 'src'))
-    tree['src/repo17_gclient.args'] = '\n'.join([
-        '# Generated from \'DEPS\'',
-        'toto = "tata"',
-    ])
-    self.assertTree(tree)
 
 if __name__ == '__main__':
-  if '-v' in sys.argv:
-    logging.basicConfig(level=logging.DEBUG)
-  unittest.main()
+    if '-v' in sys.argv:
+        logging.basicConfig(level=logging.DEBUG)
+    unittest.main()

+ 247 - 253
tests/gclient_no_sync_smoketest.py

@@ -24,269 +24,263 @@ from testing_support import fake_repos
 
 
 def write(filename, content):
-  """Writes the content of a file and create the directories as needed."""
-  filename = os.path.abspath(filename)
-  dirname = os.path.dirname(filename)
-  if not os.path.isdir(dirname):
-    os.makedirs(dirname)
-  with open(filename, 'w') as f:
-    f.write(content)
+    """Writes the content of a file and create the directories as needed."""
+    filename = os.path.abspath(filename)
+    dirname = os.path.dirname(filename)
+    if not os.path.isdir(dirname):
+        os.makedirs(dirname)
+    with open(filename, 'w') as f:
+        f.write(content)
 
 
 class GClientSmokeGIT(gclient_smoketest_base.GClientSmokeBase):
-  """Smoke tests for the no-sync experiment."""
-
-  FAKE_REPOS_CLASS = fake_repos.FakeRepoNoSyncDEPS
-
-  def setUp(self):
-    super(GClientSmokeGIT, self).setUp()
-    self.env['PATH'] = (os.path.join(ROOT_DIR, 'testing_support') + os.pathsep +
-                        self.env['PATH'])
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    if not self.enabled:
-      self.skipTest('git fake repos not available')
-
-  def testNoSync_SkipSyncNoDEPSChange(self):
-    """No DEPS changes will skip sync"""
-    config_template = ''.join([
-        'solutions = [{'
-        '  "name"        : "src",'
-        '  "url"         : %(git_base)r + "repo_1",'
-        '  "deps_file"   : "DEPS",'
-        '  "managed"     : True,'
-        '  "custom_vars" : %(custom_vars)s,'
-        '}]'
-    ])
-    self.gclient([
-        'config', '--spec', config_template % {
-            'git_base': self.git_base,
-            'custom_vars': {
-                'mac': True
+    """Smoke tests for the no-sync experiment."""
+
+    FAKE_REPOS_CLASS = fake_repos.FakeRepoNoSyncDEPS
+
+    def setUp(self):
+        super(GClientSmokeGIT, self).setUp()
+        self.env['PATH'] = (os.path.join(ROOT_DIR, 'testing_support') +
+                            os.pathsep + self.env['PATH'])
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        if not self.enabled:
+            self.skipTest('git fake repos not available')
+
+    def testNoSync_SkipSyncNoDEPSChange(self):
+        """No DEPS changes will skip sync"""
+        config_template = ''.join([
+            'solutions = [{'
+            '  "name"        : "src",'
+            '  "url"         : %(git_base)r + "repo_1",'
+            '  "deps_file"   : "DEPS",'
+            '  "managed"     : True,'
+            '  "custom_vars" : %(custom_vars)s,'
+            '}]'
+        ])
+        self.gclient([
+            'config', '--spec', config_template % {
+                'git_base': self.git_base,
+                'custom_vars': {
+                    'mac': True
+                }
             }
-        }
-    ])
-
-    output_json = os.path.join(self.root_dir, 'output.json')
-
-    revision_1 = self.FAKE_REPOS.git_hashes['repo_1'][1][0]  # DEPS 1
-    revision_2 = self.FAKE_REPOS.git_hashes['repo_1'][2][0]  # DEPS 1
-    patch_ref = self.FAKE_REPOS.git_hashes['repo_1'][3][0]  # DEPS 2
-
-    # Previous run did a sync at revision_1
-    write(
-        os.path.join(self.root_dir, gclient.PREVIOUS_SYNC_COMMITS_FILE),
-        json.dumps({'src': revision_1}))
-    write(
-        os.path.join(self.root_dir, gclient.PREVIOUS_CUSTOM_VARS_FILE),
-        json.dumps({'src': {
-            'mac': True
-        }}))
-
-    # We checkout src at revision_2 which has a different DEPS
-    # but that should not matter because patch_ref and revision_1
-    # have the same DEPS
-    self.gclient([
-        'sync', '--output-json', output_json, '--revision',
-        'src@%s' % revision_2, '--patch-ref',
-        '%srepo_1@refs/heads/main:%s' %
-        (self.git_base, patch_ref), '--experiment', 'no-sync'])
-
-    with open(output_json) as f:
-      output_json = json.load(f)
-    expected = {
-        'solutions': {
-            'src/': {
-                'revision': revision_2,
-                'scm': 'git',
-                'url': '%srepo_1' % self.git_base,
-                'was_processed': True,
-                'was_synced': False,
+        ])
+
+        output_json = os.path.join(self.root_dir, 'output.json')
+
+        revision_1 = self.FAKE_REPOS.git_hashes['repo_1'][1][0]  # DEPS 1
+        revision_2 = self.FAKE_REPOS.git_hashes['repo_1'][2][0]  # DEPS 1
+        patch_ref = self.FAKE_REPOS.git_hashes['repo_1'][3][0]  # DEPS 2
+
+        # Previous run did a sync at revision_1
+        write(os.path.join(self.root_dir, gclient.PREVIOUS_SYNC_COMMITS_FILE),
+              json.dumps({'src': revision_1}))
+        write(os.path.join(self.root_dir, gclient.PREVIOUS_CUSTOM_VARS_FILE),
+              json.dumps({'src': {
+                  'mac': True
+              }}))
+
+        # We checkout src at revision_2 which has a different DEPS
+        # but that should not matter because patch_ref and revision_1
+        # have the same DEPS
+        self.gclient([
+            'sync', '--output-json', output_json, '--revision',
+            'src@%s' % revision_2, '--patch-ref',
+            '%srepo_1@refs/heads/main:%s' % (self.git_base, patch_ref),
+            '--experiment', 'no-sync'
+        ])
+
+        with open(output_json) as f:
+            output_json = json.load(f)
+        expected = {
+            'solutions': {
+                'src/': {
+                    'revision': revision_2,
+                    'scm': 'git',
+                    'url': '%srepo_1' % self.git_base,
+                    'was_processed': True,
+                    'was_synced': False,
+                },
             },
-        },
-    }
-    self.assertEqual(expected, output_json)
-
-  def testNoSync_NoSyncNotEnablted(self):
-    """No DEPS changes will skip sync"""
-    config_template = ''.join([
-        'solutions = [{'
-        '  "name"        : "src",'
-        '  "url"         : %(git_base)r + "repo_1",'
-        '  "deps_file"   : "DEPS",'
-        '  "managed"     : True,'
-        '  "custom_vars" : %(custom_vars)s,'
-        '}]'
-    ])
-    self.gclient([
-        'config', '--spec', config_template % {
-            'git_base': self.git_base,
-            'custom_vars': {
-                'mac': True
-            }
         }
-    ])
-
-    output_json = os.path.join(self.root_dir, 'output.json')
-
-    revision_1 = self.FAKE_REPOS.git_hashes['repo_1'][1][0]  # DEPS 1
-    revision_2 = self.FAKE_REPOS.git_hashes['repo_1'][2][0]  # DEPS 1
-    patch_ref = self.FAKE_REPOS.git_hashes['repo_1'][3][0]  # DEPS 2
-
-    # Previous run did a sync at revision_1
-    write(
-        os.path.join(self.root_dir, gclient.PREVIOUS_SYNC_COMMITS_FILE),
-        json.dumps({'src': revision_1}))
-    write(
-        os.path.join(self.root_dir, gclient.PREVIOUS_CUSTOM_VARS_FILE),
-        json.dumps({'src': {
-            'mac': True
-        }}))
-
-    self.gclient([
-        'sync',
-        '--output-json',
-        output_json,
-        '--revision',
-        'src@%s' % revision_2,
-        '--patch-ref',
-        '%srepo_1@refs/heads/main:%s' % (self.git_base, patch_ref)])
-
-    with open(output_json) as f:
-      output_json = json.load(f)
-    repo2_rev = self.FAKE_REPOS.git_hashes['repo_2'][1][0]
-    expected = {
-        'solutions': {
-            'src/': {
-                'revision': revision_2,
-                'scm': 'git',
-                'url': '%srepo_1' % self.git_base,
-                'was_processed': True,
-                'was_synced': True,
-            },
-            'src/repo2/': {
-                'revision': repo2_rev,
-                'scm': 'git',
-                'url': '%srepo_2@%s' % (self.git_base, repo2_rev[:7]),
-                'was_processed': True,
-                'was_synced': True,
-            },
-        },
-    }
-    self.assertEqual(expected, output_json)
-
-  def testNoSync_CustomVarsDiff(self):
-    """We do not skip syncs if there are different custom_vars"""
-    config_template = ''.join([
-        'solutions = [{'
-        '  "name"        : "src",'
-        '  "url"         : %(git_base)r + "repo_1",'
-        '  "deps_file"   : "DEPS",'
-        '  "managed"     : True,'
-        '  "custom_vars" : %(custom_vars)s,'
-        '}]'
-    ])
-    self.gclient([
-        'config', '--spec', config_template % {
-            'git_base': self.git_base,
-            'custom_vars': {
-                'mac': True
+        self.assertEqual(expected, output_json)
+
+    def testNoSync_NoSyncNotEnablted(self):
+        """No DEPS changes will skip sync"""
+        config_template = ''.join([
+            'solutions = [{'
+            '  "name"        : "src",'
+            '  "url"         : %(git_base)r + "repo_1",'
+            '  "deps_file"   : "DEPS",'
+            '  "managed"     : True,'
+            '  "custom_vars" : %(custom_vars)s,'
+            '}]'
+        ])
+        self.gclient([
+            'config', '--spec', config_template % {
+                'git_base': self.git_base,
+                'custom_vars': {
+                    'mac': True
+                }
             }
-        }
-    ])
-
-    output_json = os.path.join(self.root_dir, 'output.json')
-
-    revision_1 = self.FAKE_REPOS.git_hashes['repo_1'][1][0]  # DEPS 1
-    revision_2 = self.FAKE_REPOS.git_hashes['repo_1'][2][0]  # DEPS 2
-    patch_ref = self.FAKE_REPOS.git_hashes['repo_1'][3][0]  # DEPS 1
-
-    # Previous run did a sync at revision_1
-    write(
-        os.path.join(self.root_dir, gclient.PREVIOUS_SYNC_COMMITS_FILE),
-        json.dumps({'src': revision_1}))
-    # No PREVIOUS_CUSTOM_VARS
-
-    # We checkout src at revision_2 which has a different DEPS
-    # but that should not matter because patch_ref and revision_1
-    # have the same DEPS
-    self.gclient([
-        'sync', '--output-json', output_json, '--revision',
-        'src@%s' % revision_2, '--patch-ref',
-        '%srepo_1@refs/heads/main:%s' %
-        (self.git_base, patch_ref), '--experiment', 'no-sync'])
-
-    with open(output_json) as f:
-      output_json = json.load(f)
-    repo2_rev = self.FAKE_REPOS.git_hashes['repo_2'][1][0]
-    expected = {
-        'solutions': {
-            'src/': {
-                'revision': revision_2,
-                'scm': 'git',
-                'url': '%srepo_1' % self.git_base,
-                'was_processed': True,
-                'was_synced': True,
+        ])
+
+        output_json = os.path.join(self.root_dir, 'output.json')
+
+        revision_1 = self.FAKE_REPOS.git_hashes['repo_1'][1][0]  # DEPS 1
+        revision_2 = self.FAKE_REPOS.git_hashes['repo_1'][2][0]  # DEPS 1
+        patch_ref = self.FAKE_REPOS.git_hashes['repo_1'][3][0]  # DEPS 2
+
+        # Previous run did a sync at revision_1
+        write(os.path.join(self.root_dir, gclient.PREVIOUS_SYNC_COMMITS_FILE),
+              json.dumps({'src': revision_1}))
+        write(os.path.join(self.root_dir, gclient.PREVIOUS_CUSTOM_VARS_FILE),
+              json.dumps({'src': {
+                  'mac': True
+              }}))
+
+        self.gclient([
+            'sync', '--output-json', output_json, '--revision',
+            'src@%s' % revision_2, '--patch-ref',
+            '%srepo_1@refs/heads/main:%s' % (self.git_base, patch_ref)
+        ])
+
+        with open(output_json) as f:
+            output_json = json.load(f)
+        repo2_rev = self.FAKE_REPOS.git_hashes['repo_2'][1][0]
+        expected = {
+            'solutions': {
+                'src/': {
+                    'revision': revision_2,
+                    'scm': 'git',
+                    'url': '%srepo_1' % self.git_base,
+                    'was_processed': True,
+                    'was_synced': True,
+                },
+                'src/repo2/': {
+                    'revision': repo2_rev,
+                    'scm': 'git',
+                    'url': '%srepo_2@%s' % (self.git_base, repo2_rev[:7]),
+                    'was_processed': True,
+                    'was_synced': True,
+                },
             },
-            'src/repo2/': {
-                'revision': repo2_rev,
-                'scm': 'git',
-                'url': '%srepo_2@%s' % (self.git_base, repo2_rev[:7]),
-                'was_processed': True,
-                'was_synced': True,
-            },
-        },
-    }
-    self.assertEqual(expected, output_json)
-
-  def testNoSync_DEPSDiff(self):
-    """We do not skip syncs if there are DEPS changes."""
-    self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
-
-    output_json = os.path.join(self.root_dir, 'output.json')
-
-    revision_1 = self.FAKE_REPOS.git_hashes['repo_1'][1][0]  # DEPS 1
-    revision_2 = self.FAKE_REPOS.git_hashes['repo_1'][2][0]  # DEPS 2
-    patch_ref = self.FAKE_REPOS.git_hashes['repo_1'][3][0]  # DEPS 1
-
-    # Previous run did a sync at revision_1
-    write(
-        os.path.join(self.root_dir, gclient.PREVIOUS_SYNC_COMMITS_FILE),
-        json.dumps({'src': revision_2}))
-
-    # We checkout src at revision_1 which has the same DEPS
-    # but that should not matter because patch_ref and revision_2
-    # have different DEPS
-    self.gclient([
-        'sync', '--output-json', output_json, '--revision',
-        'src@%s' % revision_1, '--patch-ref',
-        '%srepo_1@refs/heads/main:%s' %
-        (self.git_base, patch_ref), '--experiment', 'no-sync'])
-
-    with open(output_json) as f:
-      output_json = json.load(f)
-    repo2_rev = self.FAKE_REPOS.git_hashes['repo_2'][1][0]
-    expected = {
-        'solutions': {
-            'src/': {
-                'revision': revision_1,
-                'scm': 'git',
-                'url': '%srepo_1' % self.git_base,
-                'was_processed': True,
-                'was_synced': True,
+        }
+        self.assertEqual(expected, output_json)
+
+    def testNoSync_CustomVarsDiff(self):
+        """We do not skip syncs if there are different custom_vars"""
+        config_template = ''.join([
+            'solutions = [{'
+            '  "name"        : "src",'
+            '  "url"         : %(git_base)r + "repo_1",'
+            '  "deps_file"   : "DEPS",'
+            '  "managed"     : True,'
+            '  "custom_vars" : %(custom_vars)s,'
+            '}]'
+        ])
+        self.gclient([
+            'config', '--spec', config_template % {
+                'git_base': self.git_base,
+                'custom_vars': {
+                    'mac': True
+                }
+            }
+        ])
+
+        output_json = os.path.join(self.root_dir, 'output.json')
+
+        revision_1 = self.FAKE_REPOS.git_hashes['repo_1'][1][0]  # DEPS 1
+        revision_2 = self.FAKE_REPOS.git_hashes['repo_1'][2][0]  # DEPS 2
+        patch_ref = self.FAKE_REPOS.git_hashes['repo_1'][3][0]  # DEPS 1
+
+        # Previous run did a sync at revision_1
+        write(os.path.join(self.root_dir, gclient.PREVIOUS_SYNC_COMMITS_FILE),
+              json.dumps({'src': revision_1}))
+        # No PREVIOUS_CUSTOM_VARS
+
+        # We checkout src at revision_2 which has a different DEPS
+        # but that should not matter because patch_ref and revision_1
+        # have the same DEPS
+        self.gclient([
+            'sync', '--output-json', output_json, '--revision',
+            'src@%s' % revision_2, '--patch-ref',
+            '%srepo_1@refs/heads/main:%s' % (self.git_base, patch_ref),
+            '--experiment', 'no-sync'
+        ])
+
+        with open(output_json) as f:
+            output_json = json.load(f)
+        repo2_rev = self.FAKE_REPOS.git_hashes['repo_2'][1][0]
+        expected = {
+            'solutions': {
+                'src/': {
+                    'revision': revision_2,
+                    'scm': 'git',
+                    'url': '%srepo_1' % self.git_base,
+                    'was_processed': True,
+                    'was_synced': True,
+                },
+                'src/repo2/': {
+                    'revision': repo2_rev,
+                    'scm': 'git',
+                    'url': '%srepo_2@%s' % (self.git_base, repo2_rev[:7]),
+                    'was_processed': True,
+                    'was_synced': True,
+                },
             },
-            'src/repo2/': {
-                'revision': repo2_rev,
-                'scm': 'git',
-                'url': '%srepo_2@%s' % (self.git_base, repo2_rev[:7]),
-                'was_processed': True,
-                'was_synced': True,
+        }
+        self.assertEqual(expected, output_json)
+
+    def testNoSync_DEPSDiff(self):
+        """We do not skip syncs if there are DEPS changes."""
+        self.gclient(['config', self.git_base + 'repo_1', '--name', 'src'])
+
+        output_json = os.path.join(self.root_dir, 'output.json')
+
+        revision_1 = self.FAKE_REPOS.git_hashes['repo_1'][1][0]  # DEPS 1
+        revision_2 = self.FAKE_REPOS.git_hashes['repo_1'][2][0]  # DEPS 2
+        patch_ref = self.FAKE_REPOS.git_hashes['repo_1'][3][0]  # DEPS 1
+
+        # Previous run did a sync at revision_1
+        write(os.path.join(self.root_dir, gclient.PREVIOUS_SYNC_COMMITS_FILE),
+              json.dumps({'src': revision_2}))
+
+        # We checkout src at revision_1 which has the same DEPS
+        # but that should not matter because patch_ref and revision_2
+        # have different DEPS
+        self.gclient([
+            'sync', '--output-json', output_json, '--revision',
+            'src@%s' % revision_1, '--patch-ref',
+            '%srepo_1@refs/heads/main:%s' % (self.git_base, patch_ref),
+            '--experiment', 'no-sync'
+        ])
+
+        with open(output_json) as f:
+            output_json = json.load(f)
+        repo2_rev = self.FAKE_REPOS.git_hashes['repo_2'][1][0]
+        expected = {
+            'solutions': {
+                'src/': {
+                    'revision': revision_1,
+                    'scm': 'git',
+                    'url': '%srepo_1' % self.git_base,
+                    'was_processed': True,
+                    'was_synced': True,
+                },
+                'src/repo2/': {
+                    'revision': repo2_rev,
+                    'scm': 'git',
+                    'url': '%srepo_2@%s' % (self.git_base, repo2_rev[:7]),
+                    'was_processed': True,
+                    'was_synced': True,
+                },
             },
-        },
-    }
-    self.assertEqual(expected, output_json)
+        }
+        self.assertEqual(expected, output_json)
 
 
 if __name__ == '__main__':
-  if '-v' in sys.argv:
-    logging.basicConfig(level=logging.DEBUG)
-  unittest.main()
+    if '-v' in sys.argv:
+        logging.basicConfig(level=logging.DEBUG)
+    unittest.main()

+ 173 - 176
tests/gclient_paths_test.py

@@ -8,7 +8,6 @@ import os
 import sys
 import unittest
 
-
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 from io import StringIO
@@ -18,235 +17,233 @@ import gclient_paths
 import gclient_utils
 import subprocess2
 
-
-EXCEPTION = subprocess2.CalledProcessError(
-    128, ['cmd'], 'cwd', 'stdout', 'stderr')
+EXCEPTION = subprocess2.CalledProcessError(128, ['cmd'], 'cwd', 'stdout',
+                                           'stderr')
 
 
 class TestBase(unittest.TestCase):
-  def setUp(self):
-    super(TestBase, self).setUp()
-    self.file_tree = {}
-    self.root = 'C:\\' if sys.platform == 'win32' else '/'
-    self.cwd = self.root
-    mock.patch('gclient_utils.FileRead', self.read).start()
-    mock.patch('os.environ', {}).start()
-    mock.patch('os.getcwd', self.getcwd).start()
-    mock.patch('os.path.exists', self.exists).start()
-    mock.patch('os.path.realpath', side_effect=lambda path: path).start()
-    mock.patch('subprocess2.check_output').start()
-    mock.patch('sys.platform', '').start()
-    mock.patch('sys.stderr', StringIO()).start()
-    self.addCleanup(mock.patch.stopall)
-
-  def getcwd(self):
-    return self.cwd
-
-  def exists(self, path):
-    return path in self.file_tree
-
-  def read(self, path):
-    return self.file_tree[path]
-
-  def make_file_tree(self, file_tree):
-    self.file_tree = {
-      self.root + path: content
-      for path, content in file_tree.items()
-    }
+    def setUp(self):
+        super(TestBase, self).setUp()
+        self.file_tree = {}
+        self.root = 'C:\\' if sys.platform == 'win32' else '/'
+        self.cwd = self.root
+        mock.patch('gclient_utils.FileRead', self.read).start()
+        mock.patch('os.environ', {}).start()
+        mock.patch('os.getcwd', self.getcwd).start()
+        mock.patch('os.path.exists', self.exists).start()
+        mock.patch('os.path.realpath', side_effect=lambda path: path).start()
+        mock.patch('subprocess2.check_output').start()
+        mock.patch('sys.platform', '').start()
+        mock.patch('sys.stderr', StringIO()).start()
+        self.addCleanup(mock.patch.stopall)
+
+    def getcwd(self):
+        return self.cwd
+
+    def exists(self, path):
+        return path in self.file_tree
+
+    def read(self, path):
+        return self.file_tree[path]
+
+    def make_file_tree(self, file_tree):
+        self.file_tree = {
+            self.root + path: content
+            for path, content in file_tree.items()
+        }
 
 
 class FindGclientRootTest(TestBase):
-  def testFindGclientRoot(self):
-    self.make_file_tree({'.gclient': ''})
-    self.assertEqual(self.root, gclient_paths.FindGclientRoot(self.root))
-
-  def testGclientRootInParentDir(self):
-    self.make_file_tree({
-      '.gclient': '',
-      '.gclient_entries': 'entries = {"foo": "..."}',
-    })
-    self.assertEqual(
-        self.root,
-        gclient_paths.FindGclientRoot(os.path.join(self.root, 'foo', 'bar')))
-
-  def testGclientRootInParentDir_NotInGclientEntries(self):
-    self.make_file_tree({
-      '.gclient': '',
-      '.gclient_entries': 'entries = {"foo": "..."}',
-    })
-    self.assertIsNone(
-        gclient_paths.FindGclientRoot(os.path.join(self.root, 'bar', 'baz')))
-
-  def testGclientRootInParentDir_NoGclientEntriesFile(self):
-    self.make_file_tree({'.gclient': ''})
-    self.assertEqual(
-        self.root,
-        gclient_paths.FindGclientRoot(os.path.join(self.root, 'x', 'y', 'z')))
-    self.assertEqual(
-        '%s missing, .gclient file in parent directory %s might not be the '
-        'file you want to use.\n' % (
-            os.path.join(self.root, '.gclient_entries'), self.root),
-        sys.stderr.getvalue())
-
-  def testGclientRootInParentDir_ErrorWhenParsingEntries(self):
-    self.make_file_tree({'.gclient': '', '.gclient_entries': ':P'})
-    with self.assertRaises(Exception):
-      gclient_paths.FindGclientRoot(os.path.join(self.root, 'foo', 'bar'))
-
-  def testRootNotFound(self):
-    self.assertIsNone(
-        gclient_paths.FindGclientRoot(os.path.join(self.root, 'x', 'y', 'z')))
+    def testFindGclientRoot(self):
+        self.make_file_tree({'.gclient': ''})
+        self.assertEqual(self.root, gclient_paths.FindGclientRoot(self.root))
+
+    def testGclientRootInParentDir(self):
+        self.make_file_tree({
+            '.gclient': '',
+            '.gclient_entries': 'entries = {"foo": "..."}',
+        })
+        self.assertEqual(
+            self.root,
+            gclient_paths.FindGclientRoot(os.path.join(self.root, 'foo',
+                                                       'bar')))
+
+    def testGclientRootInParentDir_NotInGclientEntries(self):
+        self.make_file_tree({
+            '.gclient': '',
+            '.gclient_entries': 'entries = {"foo": "..."}',
+        })
+        self.assertIsNone(
+            gclient_paths.FindGclientRoot(os.path.join(self.root, 'bar',
+                                                       'baz')))
+
+    def testGclientRootInParentDir_NoGclientEntriesFile(self):
+        self.make_file_tree({'.gclient': ''})
+        self.assertEqual(
+            self.root,
+            gclient_paths.FindGclientRoot(os.path.join(self.root, 'x', 'y',
+                                                       'z')))
+        self.assertEqual(
+            '%s missing, .gclient file in parent directory %s might not be the '
+            'file you want to use.\n' %
+            (os.path.join(self.root, '.gclient_entries'), self.root),
+            sys.stderr.getvalue())
+
+    def testGclientRootInParentDir_ErrorWhenParsingEntries(self):
+        self.make_file_tree({'.gclient': '', '.gclient_entries': ':P'})
+        with self.assertRaises(Exception):
+            gclient_paths.FindGclientRoot(os.path.join(self.root, 'foo', 'bar'))
+
+    def testRootNotFound(self):
+        self.assertIsNone(
+            gclient_paths.FindGclientRoot(os.path.join(self.root, 'x', 'y',
+                                                       'z')))
 
 
 class GetGClientPrimarySolutionNameTest(TestBase):
-  def testGetGClientPrimarySolutionName(self):
-    self.make_file_tree({'.gclient': 'solutions = [{"name": "foo"}]'})
-    self.assertEqual(
-      'foo', gclient_paths.GetGClientPrimarySolutionName(self.root))
+    def testGetGClientPrimarySolutionName(self):
+        self.make_file_tree({'.gclient': 'solutions = [{"name": "foo"}]'})
+        self.assertEqual('foo',
+                         gclient_paths.GetGClientPrimarySolutionName(self.root))
 
-  def testNoSolutionsInGclientFile(self):
-    self.make_file_tree({'.gclient': ''})
-    self.assertIsNone(gclient_paths.GetGClientPrimarySolutionName(self.root))
+    def testNoSolutionsInGclientFile(self):
+        self.make_file_tree({'.gclient': ''})
+        self.assertIsNone(gclient_paths.GetGClientPrimarySolutionName(
+            self.root))
 
 
 class GetPrimarySolutionPathTest(TestBase):
-  def testGetPrimarySolutionPath(self):
-    self.make_file_tree({'.gclient': 'solutions = [{"name": "foo"}]'})
+    def testGetPrimarySolutionPath(self):
+        self.make_file_tree({'.gclient': 'solutions = [{"name": "foo"}]'})
 
-    self.assertEqual(
-        os.path.join(self.root, 'foo'), gclient_paths.GetPrimarySolutionPath())
+        self.assertEqual(os.path.join(self.root, 'foo'),
+                         gclient_paths.GetPrimarySolutionPath())
 
-  def testSolutionNameDefaultsToSrc(self):
-    self.make_file_tree({'.gclient': ''})
+    def testSolutionNameDefaultsToSrc(self):
+        self.make_file_tree({'.gclient': ''})
 
-    self.assertEqual(
-        os.path.join(self.root, 'src'), gclient_paths.GetPrimarySolutionPath())
+        self.assertEqual(os.path.join(self.root, 'src'),
+                         gclient_paths.GetPrimarySolutionPath())
 
-  def testGclientRootNotFound_GitRootHasBuildtools(self):
-    self.make_file_tree({os.path.join('foo', 'buildtools'): ''})
-    self.cwd = os.path.join(self.root, 'foo', 'bar')
-    subprocess2.check_output.return_value = (
-        os.path.join(self.root, 'foo').replace(os.sep, '/').encode('utf-8')
-        + b'\n')
+    def testGclientRootNotFound_GitRootHasBuildtools(self):
+        self.make_file_tree({os.path.join('foo', 'buildtools'): ''})
+        self.cwd = os.path.join(self.root, 'foo', 'bar')
+        subprocess2.check_output.return_value = (os.path.join(
+            self.root, 'foo').replace(os.sep, '/').encode('utf-8') + b'\n')
 
-    self.assertEqual(
-        os.path.join(self.root, 'foo'), gclient_paths.GetPrimarySolutionPath())
+        self.assertEqual(os.path.join(self.root, 'foo'),
+                         gclient_paths.GetPrimarySolutionPath())
 
-  def testGclientRootNotFound_NoBuildtools(self):
-    self.cwd = os.path.join(self.root, 'foo', 'bar')
-    subprocess2.check_output.return_value = b'/foo\n'
+    def testGclientRootNotFound_NoBuildtools(self):
+        self.cwd = os.path.join(self.root, 'foo', 'bar')
+        subprocess2.check_output.return_value = b'/foo\n'
 
-    self.assertIsNone(gclient_paths.GetPrimarySolutionPath())
+        self.assertIsNone(gclient_paths.GetPrimarySolutionPath())
 
-  def testGclientRootNotFound_NotInAGitRepo_CurrentDirHasBuildtools(self):
-    self.make_file_tree({os.path.join('foo', 'bar', 'buildtools'): ''})
-    self.cwd = os.path.join(self.root, 'foo', 'bar')
-    subprocess2.check_output.side_effect = EXCEPTION
+    def testGclientRootNotFound_NotInAGitRepo_CurrentDirHasBuildtools(self):
+        self.make_file_tree({os.path.join('foo', 'bar', 'buildtools'): ''})
+        self.cwd = os.path.join(self.root, 'foo', 'bar')
+        subprocess2.check_output.side_effect = EXCEPTION
 
-    self.assertEqual(self.cwd, gclient_paths.GetPrimarySolutionPath())
+        self.assertEqual(self.cwd, gclient_paths.GetPrimarySolutionPath())
 
-  def testGclientRootNotFound_NotInAGitRepo_NoBuildtools(self):
-    self.cwd = os.path.join(self.root, 'foo')
-    subprocess2.check_output.side_effect = EXCEPTION
+    def testGclientRootNotFound_NotInAGitRepo_NoBuildtools(self):
+        self.cwd = os.path.join(self.root, 'foo')
+        subprocess2.check_output.side_effect = EXCEPTION
 
-    self.assertIsNone(gclient_paths.GetPrimarySolutionPath())
+        self.assertIsNone(gclient_paths.GetPrimarySolutionPath())
 
 
 class GetBuildtoolsPathTest(TestBase):
-  def testEnvVarOverride(self):
-    os.environ = {'CHROMIUM_BUILDTOOLS_PATH': 'foo'}
+    def testEnvVarOverride(self):
+        os.environ = {'CHROMIUM_BUILDTOOLS_PATH': 'foo'}
 
-    self.assertEqual('foo', gclient_paths.GetBuildtoolsPath())
+        self.assertEqual('foo', gclient_paths.GetBuildtoolsPath())
 
-  def testNoSolutionsFound(self):
-    self.cwd = os.path.join(self.root, 'foo', 'bar')
-    subprocess2.check_output.side_effect = EXCEPTION
+    def testNoSolutionsFound(self):
+        self.cwd = os.path.join(self.root, 'foo', 'bar')
+        subprocess2.check_output.side_effect = EXCEPTION
 
-    self.assertIsNone(gclient_paths.GetBuildtoolsPath())
+        self.assertIsNone(gclient_paths.GetBuildtoolsPath())
 
-  def testBuildtoolsInSolution(self):
-    self.make_file_tree({
-      '.gclient': '',
-      os.path.join('src', 'buildtools'): '',
-    })
-    self.cwd = os.path.join(self.root, 'src', 'foo')
+    def testBuildtoolsInSolution(self):
+        self.make_file_tree({
+            '.gclient': '',
+            os.path.join('src', 'buildtools'): '',
+        })
+        self.cwd = os.path.join(self.root, 'src', 'foo')
 
-    self.assertEqual(
-        os.path.join(self.root, 'src', 'buildtools'),
-        gclient_paths.GetBuildtoolsPath())
+        self.assertEqual(os.path.join(self.root, 'src', 'buildtools'),
+                         gclient_paths.GetBuildtoolsPath())
 
-  def testBuildtoolsInGclientRoot(self):
-    self.make_file_tree({'.gclient': '', 'buildtools': ''})
-    self.cwd = os.path.join(self.root, 'src', 'foo')
+    def testBuildtoolsInGclientRoot(self):
+        self.make_file_tree({'.gclient': '', 'buildtools': ''})
+        self.cwd = os.path.join(self.root, 'src', 'foo')
 
-    self.assertEqual(
-        os.path.join(self.root, 'buildtools'),
-        gclient_paths.GetBuildtoolsPath())
+        self.assertEqual(os.path.join(self.root, 'buildtools'),
+                         gclient_paths.GetBuildtoolsPath())
 
-  def testNoBuildtools(self):
-    self.make_file_tree({'.gclient': ''})
-    self.cwd = os.path.join(self.root, 'foo', 'bar')
+    def testNoBuildtools(self):
+        self.make_file_tree({'.gclient': ''})
+        self.cwd = os.path.join(self.root, 'foo', 'bar')
 
-    self.assertIsNone(gclient_paths.GetBuildtoolsPath())
+        self.assertIsNone(gclient_paths.GetBuildtoolsPath())
 
 
 class GetBuildtoolsPlatformBinaryPath(TestBase):
-  def testNoBuildtoolsPath(self):
-    self.make_file_tree({'.gclient': ''})
-    self.cwd = os.path.join(self.root, 'foo', 'bar')
-    self.assertIsNone(gclient_paths.GetBuildtoolsPlatformBinaryPath())
+    def testNoBuildtoolsPath(self):
+        self.make_file_tree({'.gclient': ''})
+        self.cwd = os.path.join(self.root, 'foo', 'bar')
+        self.assertIsNone(gclient_paths.GetBuildtoolsPlatformBinaryPath())
 
-  def testWin(self):
-    self.make_file_tree({'.gclient': '', 'buildtools': ''})
-    sys.platform = 'win'
+    def testWin(self):
+        self.make_file_tree({'.gclient': '', 'buildtools': ''})
+        sys.platform = 'win'
 
-    self.assertEqual(
-        os.path.join(self.root, 'buildtools', 'win'),
-        gclient_paths.GetBuildtoolsPlatformBinaryPath())
+        self.assertEqual(os.path.join(self.root, 'buildtools', 'win'),
+                         gclient_paths.GetBuildtoolsPlatformBinaryPath())
 
-  def testCygwin(self):
-    self.make_file_tree({'.gclient': '', 'buildtools': ''})
-    sys.platform = 'cygwin'
+    def testCygwin(self):
+        self.make_file_tree({'.gclient': '', 'buildtools': ''})
+        sys.platform = 'cygwin'
 
-    self.assertEqual(
-        os.path.join(self.root, 'buildtools', 'win'),
-        gclient_paths.GetBuildtoolsPlatformBinaryPath())
+        self.assertEqual(os.path.join(self.root, 'buildtools', 'win'),
+                         gclient_paths.GetBuildtoolsPlatformBinaryPath())
 
-  def testMac(self):
-    self.make_file_tree({'.gclient': '', 'buildtools': ''})
-    sys.platform = 'darwin'
+    def testMac(self):
+        self.make_file_tree({'.gclient': '', 'buildtools': ''})
+        sys.platform = 'darwin'
 
-    self.assertEqual(
-        os.path.join(self.root, 'buildtools', 'mac'),
-        gclient_paths.GetBuildtoolsPlatformBinaryPath())
+        self.assertEqual(os.path.join(self.root, 'buildtools', 'mac'),
+                         gclient_paths.GetBuildtoolsPlatformBinaryPath())
 
-  def testLinux(self):
-    self.make_file_tree({'.gclient': '', 'buildtools': ''})
-    sys.platform = 'linux'
+    def testLinux(self):
+        self.make_file_tree({'.gclient': '', 'buildtools': ''})
+        sys.platform = 'linux'
 
-    self.assertEqual(
-        os.path.join(self.root, 'buildtools', 'linux64'),
-        gclient_paths.GetBuildtoolsPlatformBinaryPath())
+        self.assertEqual(os.path.join(self.root, 'buildtools', 'linux64'),
+                         gclient_paths.GetBuildtoolsPlatformBinaryPath())
 
-  def testError(self):
-    self.make_file_tree({'.gclient': '', 'buildtools': ''})
-    sys.platform = 'foo'
+    def testError(self):
+        self.make_file_tree({'.gclient': '', 'buildtools': ''})
+        sys.platform = 'foo'
 
-    with self.assertRaises(gclient_utils.Error, msg='Unknown platform: foo'):
-      gclient_paths.GetBuildtoolsPlatformBinaryPath()
+        with self.assertRaises(gclient_utils.Error,
+                               msg='Unknown platform: foo'):
+            gclient_paths.GetBuildtoolsPlatformBinaryPath()
 
 
 class GetExeSuffixTest(TestBase):
-  def testGetExeSuffix(self):
-    sys.platform = 'win'
-    self.assertEqual('.exe', gclient_paths.GetExeSuffix())
+    def testGetExeSuffix(self):
+        sys.platform = 'win'
+        self.assertEqual('.exe', gclient_paths.GetExeSuffix())
 
-    sys.platform = 'cygwin'
-    self.assertEqual('.exe', gclient_paths.GetExeSuffix())
+        sys.platform = 'cygwin'
+        self.assertEqual('.exe', gclient_paths.GetExeSuffix())
 
-    sys.platform = 'foo'
-    self.assertEqual('', gclient_paths.GetExeSuffix())
+        sys.platform = 'foo'
+        self.assertEqual('', gclient_paths.GetExeSuffix())
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 1446 - 1403
tests/gclient_scm_test.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for gclient_scm.py."""
 
 # pylint: disable=E1103
@@ -20,7 +19,6 @@ import tempfile
 import unittest
 from unittest import mock
 
-
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 import gclient_scm
@@ -30,6 +28,8 @@ import subprocess2
 from testing_support import fake_repos
 from testing_support import test_case_utils
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
 
 GIT = 'git' if sys.platform != 'win32' else 'git.bat'
 
@@ -40,62 +40,65 @@ git_cache.Mirror.SetCachePath(None)
 join = gclient_scm.os.path.join
 
 TIMESTAMP_RE = re.compile(r'\[[0-9]{1,2}:[0-9]{2}:[0-9]{2}\] (.*)', re.DOTALL)
+
+
 def strip_timestamps(value):
-  lines = value.splitlines(True)
-  for i in range(len(lines)):
-    m = TIMESTAMP_RE.match(lines[i])
-    if m:
-      lines[i] = m.group(1)
-  return ''.join(lines)
+    lines = value.splitlines(True)
+    for i in range(len(lines)):
+        m = TIMESTAMP_RE.match(lines[i])
+        if m:
+            lines[i] = m.group(1)
+    return ''.join(lines)
 
 
 class BasicTests(unittest.TestCase):
-  @mock.patch('gclient_scm.scm.GIT.Capture')
-  def testGetFirstRemoteUrl(self, mockCapture):
-    REMOTE_STRINGS = [('remote.origin.url E:\\foo\\bar', 'E:\\foo\\bar'),
-                      ('remote.origin.url /b/foo/bar', '/b/foo/bar'),
-                      ('remote.origin.url https://foo/bar', 'https://foo/bar'),
-                      ('remote.origin.url E:\\Fo Bar\\bax', 'E:\\Fo Bar\\bax'),
-                      ('remote.origin.url git://what/"do', 'git://what/"do')]
-    FAKE_PATH = '/fake/path'
-    mockCapture.side_effect = [question for question, _ in REMOTE_STRINGS]
-
-    for _, answer in REMOTE_STRINGS:
-      self.assertEqual(
-          gclient_scm.SCMWrapper._get_first_remote_url(FAKE_PATH), answer)
-
-    expected_calls = [
-        mock.call(['config', '--local', '--get-regexp', r'remote.*.url'],
-                   cwd=FAKE_PATH)
-        for _ in REMOTE_STRINGS
-    ]
-    self.assertEqual(mockCapture.mock_calls, expected_calls)
+    @mock.patch('gclient_scm.scm.GIT.Capture')
+    def testGetFirstRemoteUrl(self, mockCapture):
+        REMOTE_STRINGS = [
+            ('remote.origin.url E:\\foo\\bar', 'E:\\foo\\bar'),
+            ('remote.origin.url /b/foo/bar', '/b/foo/bar'),
+            ('remote.origin.url https://foo/bar', 'https://foo/bar'),
+            ('remote.origin.url E:\\Fo Bar\\bax', 'E:\\Fo Bar\\bax'),
+            ('remote.origin.url git://what/"do', 'git://what/"do')
+        ]
+        FAKE_PATH = '/fake/path'
+        mockCapture.side_effect = [question for question, _ in REMOTE_STRINGS]
+
+        for _, answer in REMOTE_STRINGS:
+            self.assertEqual(
+                gclient_scm.SCMWrapper._get_first_remote_url(FAKE_PATH), answer)
+
+        expected_calls = [
+            mock.call(['config', '--local', '--get-regexp', r'remote.*.url'],
+                      cwd=FAKE_PATH) for _ in REMOTE_STRINGS
+        ]
+        self.assertEqual(mockCapture.mock_calls, expected_calls)
 
 
 class BaseGitWrapperTestCase(unittest.TestCase, test_case_utils.TestCaseUtils):
-  """This class doesn't use pymox."""
-  class OptionsObject(object):
-    def __init__(self, verbose=False, revision=None):
-      self.auto_rebase = False
-      self.verbose = verbose
-      self.revision = revision
-      self.deps_os = None
-      self.force = False
-      self.reset = False
-      self.nohooks = False
-      self.no_history = False
-      self.upstream = False
-      self.cache_dir = None
-      self.merge = False
-      self.jobs = 1
-      self.break_repo_locks = False
-      self.delete_unversioned_trees = False
-      self.patch_ref = None
-      self.patch_repo = None
-      self.rebase_patch_ref = True
-      self.reset_patch_ref = True
-
-  sample_git_import = """blob
+    """This class doesn't use pymox."""
+    class OptionsObject(object):
+        def __init__(self, verbose=False, revision=None):
+            self.auto_rebase = False
+            self.verbose = verbose
+            self.revision = revision
+            self.deps_os = None
+            self.force = False
+            self.reset = False
+            self.nohooks = False
+            self.no_history = False
+            self.upstream = False
+            self.cache_dir = None
+            self.merge = False
+            self.jobs = 1
+            self.break_repo_locks = False
+            self.delete_unversioned_trees = False
+            self.patch_ref = None
+            self.patch_repo = None
+            self.rebase_patch_ref = True
+            self.reset_patch_ref = True
+
+    sample_git_import = """blob
 mark :1
 data 6
 Hello
@@ -154,1409 +157,1449 @@ M 100644 :7 c
 reset refs/heads/main
 from :3
 """
-  def Options(self, *args, **kwargs):
-    return self.OptionsObject(*args, **kwargs)
-
-  def checkstdout(self, expected):
-    # pylint: disable=no-member
-    value = sys.stdout.getvalue()
-    sys.stdout.close()
-    # Check that the expected output appears.
-    self.assertIn(expected, strip_timestamps(value))
-
-  @staticmethod
-  def CreateGitRepo(git_import, path):
-    """Do it for real."""
-    try:
-      Popen([GIT, 'init', '-q'], stdout=PIPE, stderr=STDOUT,
-            cwd=path).communicate()
-    except OSError:
-      # git is not available, skip this test.
-      return False
-    Popen([GIT, 'fast-import', '--quiet'], stdin=PIPE, stdout=PIPE,
-        stderr=STDOUT, cwd=path).communicate(input=git_import.encode())
-    Popen([GIT, 'checkout', '-q'], stdout=PIPE, stderr=STDOUT,
-        cwd=path).communicate()
-    Popen([GIT, 'remote', 'add', '-f', 'origin', '.'], stdout=PIPE,
-        stderr=STDOUT, cwd=path).communicate()
-    Popen([GIT, 'checkout', '-b', 'new', 'origin/main', '-q'], stdout=PIPE,
-        stderr=STDOUT, cwd=path).communicate()
-    Popen([GIT, 'push', 'origin', 'origin/origin:origin/main', '-q'],
-        stdout=PIPE, stderr=STDOUT, cwd=path).communicate()
-    Popen([GIT, 'config', '--unset', 'remote.origin.fetch'], stdout=PIPE,
-        stderr=STDOUT, cwd=path).communicate()
-    Popen([GIT, 'config', 'user.email', 'someuser@chromium.org'], stdout=PIPE,
-        stderr=STDOUT, cwd=path).communicate()
-    Popen([GIT, 'config', 'user.name', 'Some User'], stdout=PIPE,
-        stderr=STDOUT, cwd=path).communicate()
-    # Set HEAD back to main
-    Popen([GIT, 'checkout', 'main', '-q'],
-          stdout=PIPE,
-          stderr=STDOUT,
-          cwd=path).communicate()
-    return True
-
-  def _GetAskForDataCallback(self, expected_prompt, return_value):
-    def AskForData(prompt, options):
-      self.assertEqual(prompt, expected_prompt)
-      return return_value
-    return AskForData
-
-  def setUp(self):
-    unittest.TestCase.setUp(self)
-    test_case_utils.TestCaseUtils.setUp(self)
-    self.url = 'git://foo'
-    # The .git suffix allows gclient_scm to recognize the dir as a git repo
-    # when cloning it locally
-    self.root_dir = tempfile.mkdtemp('.git')
-    self.relpath = '.'
-    self.base_path = join(self.root_dir, self.relpath)
-    self.enabled = self.CreateGitRepo(self.sample_git_import, self.base_path)
-    mock.patch('sys.stdout', StringIO()).start()
-    self.addCleanup(mock.patch.stopall)
-    self.addCleanup(gclient_utils.rmtree, self.root_dir)
 
+    def Options(self, *args, **kwargs):
+        return self.OptionsObject(*args, **kwargs)
+
+    def checkstdout(self, expected):
+        # pylint: disable=no-member
+        value = sys.stdout.getvalue()
+        sys.stdout.close()
+        # Check that the expected output appears.
+        self.assertIn(expected, strip_timestamps(value))
+
+    @staticmethod
+    def CreateGitRepo(git_import, path):
+        """Do it for real."""
+        try:
+            Popen([GIT, 'init', '-q'], stdout=PIPE, stderr=STDOUT,
+                  cwd=path).communicate()
+        except OSError:
+            # git is not available, skip this test.
+            return False
+        Popen([GIT, 'fast-import', '--quiet'],
+              stdin=PIPE,
+              stdout=PIPE,
+              stderr=STDOUT,
+              cwd=path).communicate(input=git_import.encode())
+        Popen([GIT, 'checkout', '-q'], stdout=PIPE, stderr=STDOUT,
+              cwd=path).communicate()
+        Popen([GIT, 'remote', 'add', '-f', 'origin', '.'],
+              stdout=PIPE,
+              stderr=STDOUT,
+              cwd=path).communicate()
+        Popen([GIT, 'checkout', '-b', 'new', 'origin/main', '-q'],
+              stdout=PIPE,
+              stderr=STDOUT,
+              cwd=path).communicate()
+        Popen([GIT, 'push', 'origin', 'origin/origin:origin/main', '-q'],
+              stdout=PIPE,
+              stderr=STDOUT,
+              cwd=path).communicate()
+        Popen([GIT, 'config', '--unset', 'remote.origin.fetch'],
+              stdout=PIPE,
+              stderr=STDOUT,
+              cwd=path).communicate()
+        Popen([GIT, 'config', 'user.email', 'someuser@chromium.org'],
+              stdout=PIPE,
+              stderr=STDOUT,
+              cwd=path).communicate()
+        Popen([GIT, 'config', 'user.name', 'Some User'],
+              stdout=PIPE,
+              stderr=STDOUT,
+              cwd=path).communicate()
+        # Set HEAD back to main
+        Popen([GIT, 'checkout', 'main', '-q'],
+              stdout=PIPE,
+              stderr=STDOUT,
+              cwd=path).communicate()
+        return True
+
+    def _GetAskForDataCallback(self, expected_prompt, return_value):
+        def AskForData(prompt, options):
+            self.assertEqual(prompt, expected_prompt)
+            return return_value
+
+        return AskForData
+
+    def setUp(self):
+        unittest.TestCase.setUp(self)
+        test_case_utils.TestCaseUtils.setUp(self)
+        self.url = 'git://foo'
+        # The .git suffix allows gclient_scm to recognize the dir as a git repo
+        # when cloning it locally
+        self.root_dir = tempfile.mkdtemp('.git')
+        self.relpath = '.'
+        self.base_path = join(self.root_dir, self.relpath)
+        self.enabled = self.CreateGitRepo(self.sample_git_import,
+                                          self.base_path)
+        mock.patch('sys.stdout', StringIO()).start()
+        self.addCleanup(mock.patch.stopall)
+        self.addCleanup(gclient_utils.rmtree, self.root_dir)
 
-class ManagedGitWrapperTestCase(BaseGitWrapperTestCase):
 
-  @mock.patch('gclient_scm.GitWrapper._IsCog')
-  @mock.patch('gclient_scm.GitWrapper._Run', return_value=True)
-  @mock.patch('gclient_scm.GitWrapper._SetFetchConfig')
-  @mock.patch('gclient_scm.GitWrapper._GetCurrentBranch')
-  def testCloneInCog(self, mockGetCurrentBranch, mockSetFetchConfig, mockRun,
-                     _mockIsCog):
-    """Test that we call the correct commands when in a cog workspace."""
-    if not self.enabled:
-      return
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
-    scm._Clone('123123ab', self.url, options)
-    mockRun.assert_called_once_with(
-        ['citc', 'clone-repo', self.url, scm.checkout_path, '123123ab'],
-        options,
-        cwd=scm._root_dir,
-        retry=True,
-        print_stdout=False,
-        filter_fn=scm.filter)
-    mockSetFetchConfig.assert_called_once()
-    mockGetCurrentBranch.assert_called_once()
-
-  def testRevertMissing(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    file_path = join(self.base_path, 'a')
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_list = []
-    scm.update(options, None, file_list)
-    gclient_scm.os.remove(file_path)
-    file_list = []
-    scm.revert(options, self.args, file_list)
-    self.assertEqual(file_list, [file_path])
-    file_list = []
-    scm.diff(options, self.args, file_list)
-    self.assertEqual(file_list, [])
-    sys.stdout.close()
-
-  def testRevertNone(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_list = []
-    scm.update(options, None, file_list)
-    file_list = []
-    scm.revert(options, self.args, file_list)
-    self.assertEqual(file_list, [])
-    self.assertEqual(scm.revinfo(options, self.args, None),
-                     'a7142dc9f0009350b96a11f372b6ea658592aa95')
-    sys.stdout.close()
-
-  def testRevertModified(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_list = []
-    scm.update(options, None, file_list)
-    file_path = join(self.base_path, 'a')
-    with open(file_path, 'a') as f:
-      f.writelines('touched\n')
-    file_list = []
-    scm.revert(options, self.args, file_list)
-    self.assertEqual(file_list, [file_path])
-    file_list = []
-    scm.diff(options, self.args, file_list)
-    self.assertEqual(file_list, [])
-    self.assertEqual(scm.revinfo(options, self.args, None),
-                      'a7142dc9f0009350b96a11f372b6ea658592aa95')
-    sys.stdout.close()
-
-  def testRevertNew(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_list = []
-    scm.update(options, None, file_list)
-    file_path = join(self.base_path, 'c')
-    with open(file_path, 'w') as f:
-      f.writelines('new\n')
-    Popen([GIT, 'add', 'c'], stdout=PIPE,
-          stderr=STDOUT, cwd=self.base_path).communicate()
-    file_list = []
-    scm.revert(options, self.args, file_list)
-    self.assertEqual(file_list, [file_path])
-    file_list = []
-    scm.diff(options, self.args, file_list)
-    self.assertEqual(file_list, [])
-    self.assertEqual(scm.revinfo(options, self.args, None),
-                     'a7142dc9f0009350b96a11f372b6ea658592aa95')
-    sys.stdout.close()
-
-  def testStatusRef(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    file_paths = [join(self.base_path, 'a')]
-    with open(file_paths[0], 'a') as f:
-      f.writelines('touched\n')
-    scm = gclient_scm.GitWrapper(self.url + '@refs/heads/feature',
-                                 self.root_dir, self.relpath)
-    file_paths.append(join(self.base_path, 'c'))  # feature branch touches c
-    file_list = []
-    scm.status(options, self.args, file_list)
-    self.assertEqual(file_list, file_paths)
-    self.checkstdout(
-        ('\n________ running \'git -c core.quotePath=false diff --name-status '
-         'refs/remotes/origin/feature\' in \'%s\'\n\nM\ta\n') %
-        join(self.root_dir, '.'))
-
-  def testStatusNew(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    file_path = join(self.base_path, 'a')
-    with open(file_path, 'a') as f:
-      f.writelines('touched\n')
-    scm = gclient_scm.GitWrapper(
-        self.url + '@069c602044c5388d2d15c3f875b057c852003458', self.root_dir,
-        self.relpath)
-    file_list = []
-    scm.status(options, self.args, file_list)
-    self.assertEqual(file_list, [file_path])
-    self.checkstdout(
-        ('\n________ running \'git -c core.quotePath=false diff --name-status '
-         '069c602044c5388d2d15c3f875b057c852003458\' in \'%s\'\n\nM\ta\n') %
-            join(self.root_dir, '.'))
-
-
-  def testStatusNewNoBaseRev(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    file_path = join(self.base_path, 'a')
-    with open(file_path, 'a') as f:
-      f.writelines('touched\n')
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
-    file_list = []
-    scm.status(options, self.args, file_list)
-    self.assertEqual(file_list, [file_path])
-    self.checkstdout(
-        ('\n________ running \'git -c core.quotePath=false diff --name-status'
-         '\' in \'%s\'\n\nM\ta\n') % join(self.root_dir, '.'))
-
-  def testStatus2New(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    expected_file_list = []
-    for f in ['a', 'b']:
-      file_path = join(self.base_path, f)
-      with open(file_path, 'a') as f:
-        f.writelines('touched\n')
-      expected_file_list.extend([file_path])
-    scm = gclient_scm.GitWrapper(
-        self.url + '@069c602044c5388d2d15c3f875b057c852003458', self.root_dir,
-        self.relpath)
-    file_list = []
-    scm.status(options, self.args, file_list)
-    expected_file_list = [join(self.base_path, x) for x in ['a', 'b']]
-    self.assertEqual(sorted(file_list), expected_file_list)
-    self.checkstdout(
-        ('\n________ running \'git -c core.quotePath=false diff --name-status '
-         '069c602044c5388d2d15c3f875b057c852003458\' in \'%s\'\n\nM\ta\nM\tb\n')
-            % join(self.root_dir, '.'))
-
-  def testUpdateUpdate(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    expected_file_list = [join(self.base_path, x) for x in ['a', 'b']]
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_list = []
-
-    scm.update(options, (), file_list)
-    self.assertEqual(file_list, expected_file_list)
-    self.assertEqual(scm.revinfo(options, (), None),
-                      'a7142dc9f0009350b96a11f372b6ea658592aa95')
-    self.assertEqual(scm._Capture(['config', '--get', 'diff.ignoreSubmodules']),
-                     'dirty')
-    self.assertEqual(
-        scm._Capture(['config', '--get', 'fetch.recurseSubmodules']), 'off')
-    sys.stdout.close()
-
-  def testUpdateMerge(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    options.merge = True
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    scm._Run(['checkout', '-q', 'feature'], options)
-    rev = scm.revinfo(options, (), None)
-    file_list = []
-    scm.update(options, (), file_list)
-    self.assertEqual(file_list, [join(self.base_path, x)
-                                 for x in ['a', 'b', 'c']])
-    # The actual commit that is created is unstable, so we verify its tree and
-    # parents instead.
-    self.assertEqual(scm._Capture(['rev-parse', 'HEAD:']),
-                     'd2e35c10ac24d6c621e14a1fcadceb533155627d')
-    parent = 'HEAD^' if sys.platform != 'win32' else 'HEAD^^'
-    self.assertEqual(scm._Capture(['rev-parse', parent + '1']), rev)
-    self.assertEqual(scm._Capture(['rev-parse', parent + '2']),
-                     scm._Capture(['rev-parse', 'origin/main']))
-    sys.stdout.close()
-
-  def testUpdateRebase(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    scm._Run(['checkout', '-q', 'feature'], options)
-    file_list = []
-    # Fake a 'y' key press.
-    scm._AskForData = self._GetAskForDataCallback(
-        'Cannot fast-forward merge, attempt to rebase? '
-        '(y)es / (q)uit / (s)kip : ', 'y')
-    scm.update(options, (), file_list)
-    self.assertEqual(file_list, [join(self.base_path, x)
-                                 for x in ['a', 'b', 'c']])
-    # The actual commit that is created is unstable, so we verify its tree and
-    # parent instead.
-    self.assertEqual(scm._Capture(['rev-parse', 'HEAD:']),
-                     'd2e35c10ac24d6c621e14a1fcadceb533155627d')
-    parent = 'HEAD^' if sys.platform != 'win32' else 'HEAD^^'
-    self.assertEqual(scm._Capture(['rev-parse', parent + '1']),
-                     scm._Capture(['rev-parse', 'origin/main']))
-    sys.stdout.close()
-
-  def testUpdateReset(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    options.reset = True
-
-    dir_path = join(self.base_path, 'c')
-    os.mkdir(dir_path)
-    with open(join(dir_path, 'nested'), 'w') as f:
-      f.writelines('new\n')
-
-    file_path = join(self.base_path, 'file')
-    with open(file_path, 'w') as f:
-      f.writelines('new\n')
-
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_list = []
-    scm.update(options, (), file_list)
-    self.assert_(gclient_scm.os.path.isdir(dir_path))
-    self.assert_(gclient_scm.os.path.isfile(file_path))
-    sys.stdout.close()
-
-  def testUpdateResetUnsetsFetchConfig(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    options.reset = True
-
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    scm._Run(['config', 'remote.origin.fetch',
-              '+refs/heads/bad/ref:refs/remotes/origin/bad/ref'], options)
-
-    file_list = []
-    scm.update(options, (), file_list)
-    self.assertEqual(scm.revinfo(options, (), None),
-                     '069c602044c5388d2d15c3f875b057c852003458')
-    sys.stdout.close()
-
-  def testUpdateResetDeleteUnversionedTrees(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    options.reset = True
-    options.delete_unversioned_trees = True
-
-    dir_path = join(self.base_path, 'dir')
-    os.mkdir(dir_path)
-    with open(join(dir_path, 'nested'), 'w') as f:
-      f.writelines('new\n')
-
-    file_path = join(self.base_path, 'file')
-    with open(file_path, 'w') as f:
-      f.writelines('new\n')
-
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_list = []
-    scm.update(options, (), file_list)
-    self.assert_(not gclient_scm.os.path.isdir(dir_path))
-    self.assert_(gclient_scm.os.path.isfile(file_path))
-    sys.stdout.close()
-
-  def testUpdateUnstagedConflict(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_path = join(self.base_path, 'b')
-    with open(file_path, 'w') as f:
-      f.writelines('conflict\n')
-    try:
-      scm.update(options, (), [])
-      self.fail()
-    except (gclient_scm.gclient_utils.Error, subprocess2.CalledProcessError):
-      # The exact exception text varies across git versions so it's not worth
-      # verifying it. It's fine as long as it throws.
-      pass
-    # Manually flush stdout since we can't verify it's content accurately across
-    # git versions.
-    sys.stdout.getvalue()
-    sys.stdout.close()
-
-  @unittest.skip('Skipping until crbug.com/670884 is resolved.')
-  def testUpdateLocked(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_path = join(self.base_path, '.git', 'index.lock')
-    with open(file_path, 'w'):
-      pass
-    with self.assertRaises(subprocess2.CalledProcessError):
-      scm.update(options, (), [])
-    sys.stdout.close()
-
-  def testUpdateLockedBreak(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    options.break_repo_locks = True
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_path = join(self.base_path, '.git', 'index.lock')
-    with open(file_path, 'w'):
-      pass
-    scm.update(options, (), [])
-    self.assertRegexpMatches(sys.stdout.getvalue(),
-                             r'breaking lock.*\.git[/|\\]index\.lock')
-    self.assertFalse(os.path.exists(file_path))
-    sys.stdout.close()
-
-  def testUpdateConflict(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_path = join(self.base_path, 'b')
-    with open(file_path, 'w') as f:
-      f.writelines('conflict\n')
-    scm._Run(['commit', '-am', 'test'], options)
-    scm._AskForData = self._GetAskForDataCallback(
-        'Cannot fast-forward merge, attempt to rebase? '
-        '(y)es / (q)uit / (s)kip : ', 'y')
-
-    with self.assertRaises(gclient_scm.gclient_utils.Error) as e:
-      scm.update(options, (), [])
-    self.assertEqual(
-        e.exception.args[0],
-        'Conflict while rebasing this branch.\n'
-        'Fix the conflict and run gclient again.\n'
-        'See \'man git-rebase\' for details.\n')
-
-    with self.assertRaises(gclient_scm.gclient_utils.Error) as e:
-      scm.update(options, (), [])
-    self.assertEqual(
-        e.exception.args[0], '\n____ . at refs/remotes/origin/main\n'
-        '\tYou have unstaged changes.\n'
-        '\tcd into ., run git status to see changes,\n'
-        '\tand commit, stash, or reset.\n')
-
-    sys.stdout.close()
-
-  def testRevinfo(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    rev_info = scm.revinfo(options, (), None)
-    self.assertEqual(rev_info, '069c602044c5388d2d15c3f875b057c852003458')
+class ManagedGitWrapperTestCase(BaseGitWrapperTestCase):
+    @mock.patch('gclient_scm.GitWrapper._IsCog')
+    @mock.patch('gclient_scm.GitWrapper._Run', return_value=True)
+    @mock.patch('gclient_scm.GitWrapper._SetFetchConfig')
+    @mock.patch('gclient_scm.GitWrapper._GetCurrentBranch')
+    def testCloneInCog(self, mockGetCurrentBranch, mockSetFetchConfig, mockRun,
+                       _mockIsCog):
+        """Test that we call the correct commands when in a cog workspace."""
+        if not self.enabled:
+            return
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        scm._Clone('123123ab', self.url, options)
+        mockRun.assert_called_once_with(
+            ['citc', 'clone-repo', self.url, scm.checkout_path, '123123ab'],
+            options,
+            cwd=scm._root_dir,
+            retry=True,
+            print_stdout=False,
+            filter_fn=scm.filter)
+        mockSetFetchConfig.assert_called_once()
+        mockGetCurrentBranch.assert_called_once()
+
+    def testRevertMissing(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        file_path = join(self.base_path, 'a')
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_list = []
+        scm.update(options, None, file_list)
+        gclient_scm.os.remove(file_path)
+        file_list = []
+        scm.revert(options, self.args, file_list)
+        self.assertEqual(file_list, [file_path])
+        file_list = []
+        scm.diff(options, self.args, file_list)
+        self.assertEqual(file_list, [])
+        sys.stdout.close()
+
+    def testRevertNone(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_list = []
+        scm.update(options, None, file_list)
+        file_list = []
+        scm.revert(options, self.args, file_list)
+        self.assertEqual(file_list, [])
+        self.assertEqual(scm.revinfo(options, self.args, None),
+                         'a7142dc9f0009350b96a11f372b6ea658592aa95')
+        sys.stdout.close()
+
+    def testRevertModified(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_list = []
+        scm.update(options, None, file_list)
+        file_path = join(self.base_path, 'a')
+        with open(file_path, 'a') as f:
+            f.writelines('touched\n')
+        file_list = []
+        scm.revert(options, self.args, file_list)
+        self.assertEqual(file_list, [file_path])
+        file_list = []
+        scm.diff(options, self.args, file_list)
+        self.assertEqual(file_list, [])
+        self.assertEqual(scm.revinfo(options, self.args, None),
+                         'a7142dc9f0009350b96a11f372b6ea658592aa95')
+        sys.stdout.close()
+
+    def testRevertNew(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_list = []
+        scm.update(options, None, file_list)
+        file_path = join(self.base_path, 'c')
+        with open(file_path, 'w') as f:
+            f.writelines('new\n')
+        Popen([GIT, 'add', 'c'], stdout=PIPE, stderr=STDOUT,
+              cwd=self.base_path).communicate()
+        file_list = []
+        scm.revert(options, self.args, file_list)
+        self.assertEqual(file_list, [file_path])
+        file_list = []
+        scm.diff(options, self.args, file_list)
+        self.assertEqual(file_list, [])
+        self.assertEqual(scm.revinfo(options, self.args, None),
+                         'a7142dc9f0009350b96a11f372b6ea658592aa95')
+        sys.stdout.close()
+
+    def testStatusRef(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        file_paths = [join(self.base_path, 'a')]
+        with open(file_paths[0], 'a') as f:
+            f.writelines('touched\n')
+        scm = gclient_scm.GitWrapper(self.url + '@refs/heads/feature',
+                                     self.root_dir, self.relpath)
+        file_paths.append(join(self.base_path, 'c'))  # feature branch touches c
+        file_list = []
+        scm.status(options, self.args, file_list)
+        self.assertEqual(file_list, file_paths)
+        self.checkstdout((
+            '\n________ running \'git -c core.quotePath=false diff --name-status '
+            'refs/remotes/origin/feature\' in \'%s\'\n\nM\ta\n') %
+                         join(self.root_dir, '.'))
+
+    def testStatusNew(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        file_path = join(self.base_path, 'a')
+        with open(file_path, 'a') as f:
+            f.writelines('touched\n')
+        scm = gclient_scm.GitWrapper(
+            self.url + '@069c602044c5388d2d15c3f875b057c852003458',
+            self.root_dir, self.relpath)
+        file_list = []
+        scm.status(options, self.args, file_list)
+        self.assertEqual(file_list, [file_path])
+        self.checkstdout((
+            '\n________ running \'git -c core.quotePath=false diff --name-status '
+            '069c602044c5388d2d15c3f875b057c852003458\' in \'%s\'\n\nM\ta\n') %
+                         join(self.root_dir, '.'))
+
+    def testStatusNewNoBaseRev(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        file_path = join(self.base_path, 'a')
+        with open(file_path, 'a') as f:
+            f.writelines('touched\n')
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_list = []
+        scm.status(options, self.args, file_list)
+        self.assertEqual(file_list, [file_path])
+        self.checkstdout((
+            '\n________ running \'git -c core.quotePath=false diff --name-status'
+            '\' in \'%s\'\n\nM\ta\n') % join(self.root_dir, '.'))
+
+    def testStatus2New(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        expected_file_list = []
+        for f in ['a', 'b']:
+            file_path = join(self.base_path, f)
+            with open(file_path, 'a') as f:
+                f.writelines('touched\n')
+            expected_file_list.extend([file_path])
+        scm = gclient_scm.GitWrapper(
+            self.url + '@069c602044c5388d2d15c3f875b057c852003458',
+            self.root_dir, self.relpath)
+        file_list = []
+        scm.status(options, self.args, file_list)
+        expected_file_list = [join(self.base_path, x) for x in ['a', 'b']]
+        self.assertEqual(sorted(file_list), expected_file_list)
+        self.checkstdout((
+            '\n________ running \'git -c core.quotePath=false diff --name-status '
+            '069c602044c5388d2d15c3f875b057c852003458\' in \'%s\'\n\nM\ta\nM\tb\n'
+        ) % join(self.root_dir, '.'))
+
+    def testUpdateUpdate(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        expected_file_list = [join(self.base_path, x) for x in ['a', 'b']]
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_list = []
+
+        scm.update(options, (), file_list)
+        self.assertEqual(file_list, expected_file_list)
+        self.assertEqual(scm.revinfo(options, (), None),
+                         'a7142dc9f0009350b96a11f372b6ea658592aa95')
+        self.assertEqual(
+            scm._Capture(['config', '--get', 'diff.ignoreSubmodules']), 'dirty')
+        self.assertEqual(
+            scm._Capture(['config', '--get', 'fetch.recurseSubmodules']), 'off')
+        sys.stdout.close()
+
+    def testUpdateMerge(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        options.merge = True
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        scm._Run(['checkout', '-q', 'feature'], options)
+        rev = scm.revinfo(options, (), None)
+        file_list = []
+        scm.update(options, (), file_list)
+        self.assertEqual(file_list,
+                         [join(self.base_path, x) for x in ['a', 'b', 'c']])
+        # The actual commit that is created is unstable, so we verify its tree
+        # and parents instead.
+        self.assertEqual(scm._Capture(['rev-parse', 'HEAD:']),
+                         'd2e35c10ac24d6c621e14a1fcadceb533155627d')
+        parent = 'HEAD^' if sys.platform != 'win32' else 'HEAD^^'
+        self.assertEqual(scm._Capture(['rev-parse', parent + '1']), rev)
+        self.assertEqual(scm._Capture(['rev-parse', parent + '2']),
+                         scm._Capture(['rev-parse', 'origin/main']))
+        sys.stdout.close()
+
+    def testUpdateRebase(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        scm._Run(['checkout', '-q', 'feature'], options)
+        file_list = []
+        # Fake a 'y' key press.
+        scm._AskForData = self._GetAskForDataCallback(
+            'Cannot fast-forward merge, attempt to rebase? '
+            '(y)es / (q)uit / (s)kip : ', 'y')
+        scm.update(options, (), file_list)
+        self.assertEqual(file_list,
+                         [join(self.base_path, x) for x in ['a', 'b', 'c']])
+        # The actual commit that is created is unstable, so we verify its tree
+        # and parent instead.
+        self.assertEqual(scm._Capture(['rev-parse', 'HEAD:']),
+                         'd2e35c10ac24d6c621e14a1fcadceb533155627d')
+        parent = 'HEAD^' if sys.platform != 'win32' else 'HEAD^^'
+        self.assertEqual(scm._Capture(['rev-parse', parent + '1']),
+                         scm._Capture(['rev-parse', 'origin/main']))
+        sys.stdout.close()
+
+    def testUpdateReset(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        options.reset = True
+
+        dir_path = join(self.base_path, 'c')
+        os.mkdir(dir_path)
+        with open(join(dir_path, 'nested'), 'w') as f:
+            f.writelines('new\n')
+
+        file_path = join(self.base_path, 'file')
+        with open(file_path, 'w') as f:
+            f.writelines('new\n')
+
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_list = []
+        scm.update(options, (), file_list)
+        self.assert_(gclient_scm.os.path.isdir(dir_path))
+        self.assert_(gclient_scm.os.path.isfile(file_path))
+        sys.stdout.close()
+
+    def testUpdateResetUnsetsFetchConfig(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        options.reset = True
+
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        scm._Run([
+            'config', 'remote.origin.fetch',
+            '+refs/heads/bad/ref:refs/remotes/origin/bad/ref'
+        ], options)
+
+        file_list = []
+        scm.update(options, (), file_list)
+        self.assertEqual(scm.revinfo(options, (), None),
+                         '069c602044c5388d2d15c3f875b057c852003458')
+        sys.stdout.close()
+
+    def testUpdateResetDeleteUnversionedTrees(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        options.reset = True
+        options.delete_unversioned_trees = True
+
+        dir_path = join(self.base_path, 'dir')
+        os.mkdir(dir_path)
+        with open(join(dir_path, 'nested'), 'w') as f:
+            f.writelines('new\n')
+
+        file_path = join(self.base_path, 'file')
+        with open(file_path, 'w') as f:
+            f.writelines('new\n')
+
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_list = []
+        scm.update(options, (), file_list)
+        self.assert_(not gclient_scm.os.path.isdir(dir_path))
+        self.assert_(gclient_scm.os.path.isfile(file_path))
+        sys.stdout.close()
+
+    def testUpdateUnstagedConflict(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_path = join(self.base_path, 'b')
+        with open(file_path, 'w') as f:
+            f.writelines('conflict\n')
+        try:
+            scm.update(options, (), [])
+            self.fail()
+        except (gclient_scm.gclient_utils.Error,
+                subprocess2.CalledProcessError):
+            # The exact exception text varies across git versions so it's not
+            # worth verifying it. It's fine as long as it throws.
+            pass
+        # Manually flush stdout since we can't verify it's content accurately
+        # across git versions.
+        sys.stdout.getvalue()
+        sys.stdout.close()
+
+    @unittest.skip('Skipping until crbug.com/670884 is resolved.')
+    def testUpdateLocked(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_path = join(self.base_path, '.git', 'index.lock')
+        with open(file_path, 'w'):
+            pass
+        with self.assertRaises(subprocess2.CalledProcessError):
+            scm.update(options, (), [])
+        sys.stdout.close()
+
+    def testUpdateLockedBreak(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        options.break_repo_locks = True
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_path = join(self.base_path, '.git', 'index.lock')
+        with open(file_path, 'w'):
+            pass
+        scm.update(options, (), [])
+        self.assertRegexpMatches(sys.stdout.getvalue(),
+                                 r'breaking lock.*\.git[/|\\]index\.lock')
+        self.assertFalse(os.path.exists(file_path))
+        sys.stdout.close()
+
+    def testUpdateConflict(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_path = join(self.base_path, 'b')
+        with open(file_path, 'w') as f:
+            f.writelines('conflict\n')
+        scm._Run(['commit', '-am', 'test'], options)
+        scm._AskForData = self._GetAskForDataCallback(
+            'Cannot fast-forward merge, attempt to rebase? '
+            '(y)es / (q)uit / (s)kip : ', 'y')
+
+        with self.assertRaises(gclient_scm.gclient_utils.Error) as e:
+            scm.update(options, (), [])
+        self.assertEqual(
+            e.exception.args[0], 'Conflict while rebasing this branch.\n'
+            'Fix the conflict and run gclient again.\n'
+            'See \'man git-rebase\' for details.\n')
+
+        with self.assertRaises(gclient_scm.gclient_utils.Error) as e:
+            scm.update(options, (), [])
+        self.assertEqual(
+            e.exception.args[0], '\n____ . at refs/remotes/origin/main\n'
+            '\tYou have unstaged changes.\n'
+            '\tcd into ., run git status to see changes,\n'
+            '\tand commit, stash, or reset.\n')
+
+        sys.stdout.close()
+
+    def testRevinfo(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        rev_info = scm.revinfo(options, (), None)
+        self.assertEqual(rev_info, '069c602044c5388d2d15c3f875b057c852003458')
 
 
 class ManagedGitWrapperTestCaseMock(unittest.TestCase):
-  class OptionsObject(object):
-    def __init__(self, verbose=False, revision=None, force=False):
-      self.verbose = verbose
-      self.revision = revision
-      self.deps_os = None
-      self.force = force
-      self.reset = False
-      self.nohooks = False
-      self.break_repo_locks = False
-      # TODO(maruel): Test --jobs > 1.
-      self.jobs = 1
-      self.patch_ref = None
-      self.patch_repo = None
-      self.rebase_patch_ref = True
-
-  def Options(self, *args, **kwargs):
-    return self.OptionsObject(*args, **kwargs)
-
-  def checkstdout(self, expected):
-    # pylint: disable=no-member
-    value = sys.stdout.getvalue()
-    sys.stdout.close()
-    # Check that the expected output appears.
-    self.assertIn(expected, strip_timestamps(value))
-
-  def setUp(self):
-    self.fake_hash_1 = 't0ta11yf4k3'
-    self.fake_hash_2 = '3v3nf4k3r'
-    self.url = 'git://foo'
-    self.root_dir = '/tmp' if sys.platform != 'win32' else 't:\\tmp'
-    self.relpath = 'fake'
-    self.base_path = os.path.join(self.root_dir, self.relpath)
-    self.backup_base_path = os.path.join(self.root_dir,
-                                         'old_%s.git' % self.relpath)
-    mock.patch('gclient_scm.scm.GIT.ApplyEnvVars').start()
-    mock.patch('gclient_scm.GitWrapper._CheckMinVersion').start()
-    mock.patch('gclient_scm.GitWrapper._Fetch').start()
-    mock.patch('gclient_scm.GitWrapper._DeleteOrMove').start()
-    mock.patch('sys.stdout', StringIO()).start()
-    self.addCleanup(mock.patch.stopall)
-
-  @mock.patch('scm.GIT.IsValidRevision')
-  @mock.patch('os.path.isdir', lambda _: True)
-  def testGetUsableRevGit(self, mockIsValidRevision):
-    # pylint: disable=no-member
-    options = self.Options(verbose=True)
-
-    mockIsValidRevision.side_effect = lambda cwd, rev: rev != '1'
-
-    git_scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                     self.relpath)
-    # A [fake] git sha1 with a git repo should work (this is in the case that
-    # the LKGR gets flipped to git sha1's some day).
-    self.assertEqual(git_scm.GetUsableRev(self.fake_hash_1, options),
-                     self.fake_hash_1)
-    # An SVN rev with an existing purely git repo should raise an exception.
-    self.assertRaises(gclient_scm.gclient_utils.Error,
-                      git_scm.GetUsableRev, '1', options)
-
-  @mock.patch('gclient_scm.GitWrapper._Clone')
-  @mock.patch('os.path.isdir')
-  @mock.patch('os.path.exists')
-  @mock.patch('subprocess2.check_output')
-  def testUpdateNoDotGit(
-      self, mockCheckOutput, mockExists, mockIsdir, mockClone):
-    mockIsdir.side_effect = lambda path: path == self.base_path
-    mockExists.side_effect = lambda path: path == self.base_path
-    mockCheckOutput.side_effect = [b'refs/remotes/origin/main', b'', b'']
-
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(
-        self.url, self.root_dir, self.relpath)
-    scm.update(options, None, [])
-
-    env = gclient_scm.scm.GIT.ApplyEnvVars({})
-    self.assertEqual(mockCheckOutput.mock_calls, [
-        mock.call(['git', 'symbolic-ref', 'refs/remotes/origin/HEAD'],
-                  cwd=self.base_path,
-                  env=env,
-                  stderr=-1),
-        mock.call(['git', '-c', 'core.quotePath=false', 'ls-files'],
-                  cwd=self.base_path,
-                  env=env,
-                  stderr=-1),
-        mock.call(['git', 'rev-parse', '--verify', 'HEAD'],
-                  cwd=self.base_path,
-                  env=env,
-                  stderr=-1),
-    ])
-    mockClone.assert_called_with('refs/remotes/origin/main', self.url, options)
-    self.checkstdout('\n')
-
-  @mock.patch('gclient_scm.GitWrapper._Clone')
-  @mock.patch('os.path.isdir')
-  @mock.patch('os.path.exists')
-  @mock.patch('subprocess2.check_output')
-  def testUpdateConflict(
-      self, mockCheckOutput, mockExists, mockIsdir, mockClone):
-    mockIsdir.side_effect = lambda path: path == self.base_path
-    mockExists.side_effect = lambda path: path == self.base_path
-    mockCheckOutput.side_effect = [b'refs/remotes/origin/main', b'', b'']
-    mockClone.side_effect = [
-        gclient_scm.subprocess2.CalledProcessError(
-            None, None, None, None, None),
-        None,
-    ]
-
-    options = self.Options()
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                            self.relpath)
-    scm.update(options, None, [])
-
-    env = gclient_scm.scm.GIT.ApplyEnvVars({})
-    self.assertEqual(mockCheckOutput.mock_calls, [
-        mock.call(['git', 'symbolic-ref', 'refs/remotes/origin/HEAD'],
-                  cwd=self.base_path,
-                  env=env,
-                  stderr=-1),
-        mock.call(['git', '-c', 'core.quotePath=false', 'ls-files'],
-                  cwd=self.base_path,
-                  env=env,
-                  stderr=-1),
-        mock.call(['git', 'rev-parse', '--verify', 'HEAD'],
-                  cwd=self.base_path,
-                  env=env,
-                  stderr=-1),
-    ])
-    mockClone.assert_called_with('refs/remotes/origin/main', self.url, options)
-    self.checkstdout('\n')
+    class OptionsObject(object):
+        def __init__(self, verbose=False, revision=None, force=False):
+            self.verbose = verbose
+            self.revision = revision
+            self.deps_os = None
+            self.force = force
+            self.reset = False
+            self.nohooks = False
+            self.break_repo_locks = False
+            # TODO(maruel): Test --jobs > 1.
+            self.jobs = 1
+            self.patch_ref = None
+            self.patch_repo = None
+            self.rebase_patch_ref = True
+
+    def Options(self, *args, **kwargs):
+        return self.OptionsObject(*args, **kwargs)
+
+    def checkstdout(self, expected):
+        # pylint: disable=no-member
+        value = sys.stdout.getvalue()
+        sys.stdout.close()
+        # Check that the expected output appears.
+        self.assertIn(expected, strip_timestamps(value))
+
+    def setUp(self):
+        self.fake_hash_1 = 't0ta11yf4k3'
+        self.fake_hash_2 = '3v3nf4k3r'
+        self.url = 'git://foo'
+        self.root_dir = '/tmp' if sys.platform != 'win32' else 't:\\tmp'
+        self.relpath = 'fake'
+        self.base_path = os.path.join(self.root_dir, self.relpath)
+        self.backup_base_path = os.path.join(self.root_dir,
+                                             'old_%s.git' % self.relpath)
+        mock.patch('gclient_scm.scm.GIT.ApplyEnvVars').start()
+        mock.patch('gclient_scm.GitWrapper._CheckMinVersion').start()
+        mock.patch('gclient_scm.GitWrapper._Fetch').start()
+        mock.patch('gclient_scm.GitWrapper._DeleteOrMove').start()
+        mock.patch('sys.stdout', StringIO()).start()
+        self.addCleanup(mock.patch.stopall)
+
+    @mock.patch('scm.GIT.IsValidRevision')
+    @mock.patch('os.path.isdir', lambda _: True)
+    def testGetUsableRevGit(self, mockIsValidRevision):
+        # pylint: disable=no-member
+        options = self.Options(verbose=True)
+
+        mockIsValidRevision.side_effect = lambda cwd, rev: rev != '1'
+
+        git_scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        # A [fake] git sha1 with a git repo should work (this is in the case
+        # that the LKGR gets flipped to git sha1's some day).
+        self.assertEqual(git_scm.GetUsableRev(self.fake_hash_1, options),
+                         self.fake_hash_1)
+        # An SVN rev with an existing purely git repo should raise an exception.
+        self.assertRaises(gclient_scm.gclient_utils.Error, git_scm.GetUsableRev,
+                          '1', options)
+
+    @mock.patch('gclient_scm.GitWrapper._Clone')
+    @mock.patch('os.path.isdir')
+    @mock.patch('os.path.exists')
+    @mock.patch('subprocess2.check_output')
+    def testUpdateNoDotGit(self, mockCheckOutput, mockExists, mockIsdir,
+                           mockClone):
+        mockIsdir.side_effect = lambda path: path == self.base_path
+        mockExists.side_effect = lambda path: path == self.base_path
+        mockCheckOutput.side_effect = [b'refs/remotes/origin/main', b'', b'']
+
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        scm.update(options, None, [])
+
+        env = gclient_scm.scm.GIT.ApplyEnvVars({})
+        self.assertEqual(mockCheckOutput.mock_calls, [
+            mock.call(['git', 'symbolic-ref', 'refs/remotes/origin/HEAD'],
+                      cwd=self.base_path,
+                      env=env,
+                      stderr=-1),
+            mock.call(['git', '-c', 'core.quotePath=false', 'ls-files'],
+                      cwd=self.base_path,
+                      env=env,
+                      stderr=-1),
+            mock.call(['git', 'rev-parse', '--verify', 'HEAD'],
+                      cwd=self.base_path,
+                      env=env,
+                      stderr=-1),
+        ])
+        mockClone.assert_called_with('refs/remotes/origin/main', self.url,
+                                     options)
+        self.checkstdout('\n')
+
+    @mock.patch('gclient_scm.GitWrapper._Clone')
+    @mock.patch('os.path.isdir')
+    @mock.patch('os.path.exists')
+    @mock.patch('subprocess2.check_output')
+    def testUpdateConflict(self, mockCheckOutput, mockExists, mockIsdir,
+                           mockClone):
+        mockIsdir.side_effect = lambda path: path == self.base_path
+        mockExists.side_effect = lambda path: path == self.base_path
+        mockCheckOutput.side_effect = [b'refs/remotes/origin/main', b'', b'']
+        mockClone.side_effect = [
+            gclient_scm.subprocess2.CalledProcessError(None, None, None, None,
+                                                       None),
+            None,
+        ]
+
+        options = self.Options()
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        scm.update(options, None, [])
+
+        env = gclient_scm.scm.GIT.ApplyEnvVars({})
+        self.assertEqual(mockCheckOutput.mock_calls, [
+            mock.call(['git', 'symbolic-ref', 'refs/remotes/origin/HEAD'],
+                      cwd=self.base_path,
+                      env=env,
+                      stderr=-1),
+            mock.call(['git', '-c', 'core.quotePath=false', 'ls-files'],
+                      cwd=self.base_path,
+                      env=env,
+                      stderr=-1),
+            mock.call(['git', 'rev-parse', '--verify', 'HEAD'],
+                      cwd=self.base_path,
+                      env=env,
+                      stderr=-1),
+        ])
+        mockClone.assert_called_with('refs/remotes/origin/main', self.url,
+                                     options)
+        self.checkstdout('\n')
 
 
 class UnmanagedGitWrapperTestCase(BaseGitWrapperTestCase):
-  def checkInStdout(self, expected):
-    # pylint: disable=no-member
-    value = sys.stdout.getvalue()
-    sys.stdout.close()
-    self.assertIn(expected, value)
-
-  def checkNotInStdout(self, expected):
-    # pylint: disable=no-member
-    value = sys.stdout.getvalue()
-    sys.stdout.close()
-    self.assertNotIn(expected, value)
-
-  def getCurrentBranch(self):
-    # Returns name of current branch or HEAD for detached HEAD
-    branch = gclient_scm.scm.GIT.Capture(['rev-parse', '--abbrev-ref', 'HEAD'],
-                                          cwd=self.base_path)
-    if branch == 'HEAD':
-      return None
-    return branch
-
-  def testUpdateClone(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-
-    origin_root_dir = self.root_dir
-    self.addCleanup(gclient_utils.rmtree, origin_root_dir)
-
-    self.root_dir = tempfile.mkdtemp()
-    self.relpath = '.'
-    self.base_path = join(self.root_dir, self.relpath)
-
-    scm = gclient_scm.GitWrapper(origin_root_dir,
-                                 self.root_dir,
-                                 self.relpath)
-
-    expected_file_list = [join(self.base_path, "a"),
-                          join(self.base_path, "b")]
-    file_list = []
-    options.revision = 'unmanaged'
-    scm.update(options, (), file_list)
-
-    self.assertEqual(file_list, expected_file_list)
-    self.assertEqual(scm.revinfo(options, (), None),
-                     '069c602044c5388d2d15c3f875b057c852003458')
-    # indicates detached HEAD
-    self.assertEqual(self.getCurrentBranch(), None)
-    self.checkInStdout(
-      'Checked out refs/remotes/origin/main to a detached HEAD')
-
-
-  def testUpdateCloneOnCommit(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-
-    origin_root_dir = self.root_dir
-    self.addCleanup(gclient_utils.rmtree, origin_root_dir)
-
-    self.root_dir = tempfile.mkdtemp()
-    self.relpath = '.'
-    self.base_path = join(self.root_dir, self.relpath)
-    url_with_commit_ref = origin_root_dir +\
-                          '@a7142dc9f0009350b96a11f372b6ea658592aa95'
-
-    scm = gclient_scm.GitWrapper(url_with_commit_ref,
-                                 self.root_dir,
-                                 self.relpath)
-
-    expected_file_list = [join(self.base_path, "a"),
-                          join(self.base_path, "b")]
-    file_list = []
-    options.revision = 'unmanaged'
-    scm.update(options, (), file_list)
-
-    self.assertEqual(file_list, expected_file_list)
-    self.assertEqual(scm.revinfo(options, (), None),
-                     'a7142dc9f0009350b96a11f372b6ea658592aa95')
-    # indicates detached HEAD
-    self.assertEqual(self.getCurrentBranch(), None)
-    self.checkInStdout(
-      'Checked out a7142dc9f0009350b96a11f372b6ea658592aa95 to a detached HEAD')
-
-  def testUpdateCloneOnBranch(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-
-    origin_root_dir = self.root_dir
-    self.addCleanup(gclient_utils.rmtree, origin_root_dir)
-
-    self.root_dir = tempfile.mkdtemp()
-    self.relpath = '.'
-    self.base_path = join(self.root_dir, self.relpath)
-    url_with_branch_ref = origin_root_dir + '@feature'
-
-    scm = gclient_scm.GitWrapper(url_with_branch_ref,
-                                 self.root_dir,
-                                 self.relpath)
-
-    expected_file_list = [join(self.base_path, "a"),
-                          join(self.base_path, "b"),
-                          join(self.base_path, "c")]
-    file_list = []
-    options.revision = 'unmanaged'
-    scm.update(options, (), file_list)
-
-    self.assertEqual(file_list, expected_file_list)
-    self.assertEqual(scm.revinfo(options, (), None),
-                     '9a51244740b25fa2ded5252ca00a3178d3f665a9')
-    # indicates detached HEAD
-    self.assertEqual(self.getCurrentBranch(), None)
-    self.checkInStdout(
-        'Checked out 9a51244740b25fa2ded5252ca00a3178d3f665a9 '
-        'to a detached HEAD')
-
-  def testUpdateCloneOnFetchedRemoteBranch(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-
-    origin_root_dir = self.root_dir
-    self.addCleanup(gclient_utils.rmtree, origin_root_dir)
-
-    self.root_dir = tempfile.mkdtemp()
-    self.relpath = '.'
-    self.base_path = join(self.root_dir, self.relpath)
-    url_with_branch_ref = origin_root_dir + '@refs/remotes/origin/feature'
-
-    scm = gclient_scm.GitWrapper(url_with_branch_ref,
-                                 self.root_dir,
-                                 self.relpath)
-
-    expected_file_list = [join(self.base_path, "a"),
-                          join(self.base_path, "b"),
-                          join(self.base_path, "c")]
-    file_list = []
-    options.revision = 'unmanaged'
-    scm.update(options, (), file_list)
-
-    self.assertEqual(file_list, expected_file_list)
-    self.assertEqual(scm.revinfo(options, (), None),
-                     '9a51244740b25fa2ded5252ca00a3178d3f665a9')
-    # indicates detached HEAD
-    self.assertEqual(self.getCurrentBranch(), None)
-    self.checkInStdout(
-      'Checked out refs/remotes/origin/feature to a detached HEAD')
-
-  def testUpdateCloneOnTrueRemoteBranch(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-
-    origin_root_dir = self.root_dir
-    self.addCleanup(gclient_utils.rmtree, origin_root_dir)
-
-    self.root_dir = tempfile.mkdtemp()
-    self.relpath = '.'
-    self.base_path = join(self.root_dir, self.relpath)
-    url_with_branch_ref = origin_root_dir + '@refs/heads/feature'
-
-    scm = gclient_scm.GitWrapper(url_with_branch_ref,
-                                 self.root_dir,
-                                 self.relpath)
-
-    expected_file_list = [join(self.base_path, "a"),
-                          join(self.base_path, "b"),
-                          join(self.base_path, "c")]
-    file_list = []
-    options.revision = 'unmanaged'
-    scm.update(options, (), file_list)
-
-    self.assertEqual(file_list, expected_file_list)
-    self.assertEqual(scm.revinfo(options, (), None),
-                     '9a51244740b25fa2ded5252ca00a3178d3f665a9')
-    # @refs/heads/feature is AKA @refs/remotes/origin/feature in the clone, so
-    # should be treated as such by gclient.
-    # TODO(mmoss): Though really, we should only allow DEPS to specify branches
-    # as they are known in the upstream repo, since the mapping into the local
-    # repo can be modified by users (or we might even want to change the gclient
-    # defaults at some point). But that will take more work to stop using
-    # refs/remotes/ everywhere that we do (and to stop assuming a DEPS ref will
-    # always resolve locally, like when passing them to show-ref or rev-list).
-    self.assertEqual(self.getCurrentBranch(), None)
-    self.checkInStdout(
-      'Checked out refs/remotes/origin/feature to a detached HEAD')
-
-  def testUpdateUpdate(self):
-    if not self.enabled:
-      return
-    options = self.Options()
-    expected_file_list = []
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir,
-                                 self.relpath)
-    file_list = []
-    options.revision = 'unmanaged'
-    scm.update(options, (), file_list)
-    self.assertEqual(file_list, expected_file_list)
-    self.assertEqual(scm.revinfo(options, (), None),
-                     '069c602044c5388d2d15c3f875b057c852003458')
-    self.checkstdout('________ unmanaged solution; skipping .\n')
+    def checkInStdout(self, expected):
+        # pylint: disable=no-member
+        value = sys.stdout.getvalue()
+        sys.stdout.close()
+        self.assertIn(expected, value)
+
+    def checkNotInStdout(self, expected):
+        # pylint: disable=no-member
+        value = sys.stdout.getvalue()
+        sys.stdout.close()
+        self.assertNotIn(expected, value)
+
+    def getCurrentBranch(self):
+        # Returns name of current branch or HEAD for detached HEAD
+        branch = gclient_scm.scm.GIT.Capture(
+            ['rev-parse', '--abbrev-ref', 'HEAD'], cwd=self.base_path)
+        if branch == 'HEAD':
+            return None
+        return branch
+
+    def testUpdateClone(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+
+        origin_root_dir = self.root_dir
+        self.addCleanup(gclient_utils.rmtree, origin_root_dir)
+
+        self.root_dir = tempfile.mkdtemp()
+        self.relpath = '.'
+        self.base_path = join(self.root_dir, self.relpath)
+
+        scm = gclient_scm.GitWrapper(origin_root_dir, self.root_dir,
+                                     self.relpath)
 
+        expected_file_list = [
+            join(self.base_path, "a"),
+            join(self.base_path, "b")
+        ]
+        file_list = []
+        options.revision = 'unmanaged'
+        scm.update(options, (), file_list)
+
+        self.assertEqual(file_list, expected_file_list)
+        self.assertEqual(scm.revinfo(options, (), None),
+                         '069c602044c5388d2d15c3f875b057c852003458')
+        # indicates detached HEAD
+        self.assertEqual(self.getCurrentBranch(), None)
+        self.checkInStdout(
+            'Checked out refs/remotes/origin/main to a detached HEAD')
+
+    def testUpdateCloneOnCommit(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+
+        origin_root_dir = self.root_dir
+        self.addCleanup(gclient_utils.rmtree, origin_root_dir)
+
+        self.root_dir = tempfile.mkdtemp()
+        self.relpath = '.'
+        self.base_path = join(self.root_dir, self.relpath)
+        url_with_commit_ref = origin_root_dir +\
+                              '@a7142dc9f0009350b96a11f372b6ea658592aa95'
+
+        scm = gclient_scm.GitWrapper(url_with_commit_ref, self.root_dir,
+                                     self.relpath)
 
-class CipdWrapperTestCase(unittest.TestCase):
+        expected_file_list = [
+            join(self.base_path, "a"),
+            join(self.base_path, "b")
+        ]
+        file_list = []
+        options.revision = 'unmanaged'
+        scm.update(options, (), file_list)
+
+        self.assertEqual(file_list, expected_file_list)
+        self.assertEqual(scm.revinfo(options, (), None),
+                         'a7142dc9f0009350b96a11f372b6ea658592aa95')
+        # indicates detached HEAD
+        self.assertEqual(self.getCurrentBranch(), None)
+        self.checkInStdout(
+            'Checked out a7142dc9f0009350b96a11f372b6ea658592aa95 to a detached HEAD'
+        )
+
+    def testUpdateCloneOnBranch(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+
+        origin_root_dir = self.root_dir
+        self.addCleanup(gclient_utils.rmtree, origin_root_dir)
+
+        self.root_dir = tempfile.mkdtemp()
+        self.relpath = '.'
+        self.base_path = join(self.root_dir, self.relpath)
+        url_with_branch_ref = origin_root_dir + '@feature'
+
+        scm = gclient_scm.GitWrapper(url_with_branch_ref, self.root_dir,
+                                     self.relpath)
 
-  def setUp(self):
-    # Create this before setting up mocks.
-    self._cipd_root_dir = tempfile.mkdtemp()
-    self._workdir = tempfile.mkdtemp()
-
-    self._cipd_instance_url = 'https://chrome-infra-packages.appspot.com'
-    self._cipd_root = gclient_scm.CipdRoot(
-        self._cipd_root_dir,
-        self._cipd_instance_url)
-    self._cipd_packages = [
-        self._cipd_root.add_package('f', 'foo_package', 'foo_version'),
-        self._cipd_root.add_package('b', 'bar_package', 'bar_version'),
-        self._cipd_root.add_package('b', 'baz_package', 'baz_version'),
-    ]
-    mock.patch('tempfile.mkdtemp', lambda: self._workdir).start()
-    mock.patch('gclient_scm.CipdRoot.add_package').start()
-    mock.patch('gclient_scm.CipdRoot.clobber').start()
-    mock.patch('gclient_scm.CipdRoot.ensure_file_resolve').start()
-    mock.patch('gclient_scm.CipdRoot.ensure').start()
-    self.addCleanup(mock.patch.stopall)
-    self.addCleanup(gclient_utils.rmtree, self._cipd_root_dir)
-    self.addCleanup(gclient_utils.rmtree, self._workdir)
-
-  def createScmWithPackageThatSatisfies(self, condition):
-    return gclient_scm.CipdWrapper(
-        url=self._cipd_instance_url,
-        root_dir=self._cipd_root_dir,
-        relpath='fake_relpath',
-        root=self._cipd_root,
-        package=self.getPackageThatSatisfies(condition))
-
-  def getPackageThatSatisfies(self, condition):
-    for p in self._cipd_packages:
-      if condition(p):
-        return p
-
-    self.fail('Unable to find a satisfactory package.')
-
-  def testRevert(self):
-    """Checks that revert does nothing."""
-    scm = self.createScmWithPackageThatSatisfies(lambda _: True)
-    scm.revert(None, (), [])
-
-  @mock.patch('gclient_scm.gclient_utils.CheckCallAndFilter')
-  @mock.patch('gclient_scm.gclient_utils.rmtree')
-  def testRevinfo(self, mockRmtree, mockCheckCallAndFilter):
-    """Checks that revinfo uses the JSON from cipd describe."""
-    scm = self.createScmWithPackageThatSatisfies(lambda _: True)
-
-    expected_revinfo = '0123456789abcdef0123456789abcdef01234567'
-    json_contents = {
-        'result': {
-            'pin': {
-                'instance_id': expected_revinfo,
-            }
-        }
-    }
-    describe_json_path = join(self._workdir, 'describe.json')
-    with open(describe_json_path, 'w') as describe_json:
-      json.dump(json_contents, describe_json)
+        expected_file_list = [
+            join(self.base_path, "a"),
+            join(self.base_path, "b"),
+            join(self.base_path, "c")
+        ]
+        file_list = []
+        options.revision = 'unmanaged'
+        scm.update(options, (), file_list)
+
+        self.assertEqual(file_list, expected_file_list)
+        self.assertEqual(scm.revinfo(options, (), None),
+                         '9a51244740b25fa2ded5252ca00a3178d3f665a9')
+        # indicates detached HEAD
+        self.assertEqual(self.getCurrentBranch(), None)
+        self.checkInStdout(
+            'Checked out 9a51244740b25fa2ded5252ca00a3178d3f665a9 '
+            'to a detached HEAD')
+
+    def testUpdateCloneOnFetchedRemoteBranch(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+
+        origin_root_dir = self.root_dir
+        self.addCleanup(gclient_utils.rmtree, origin_root_dir)
+
+        self.root_dir = tempfile.mkdtemp()
+        self.relpath = '.'
+        self.base_path = join(self.root_dir, self.relpath)
+        url_with_branch_ref = origin_root_dir + '@refs/remotes/origin/feature'
+
+        scm = gclient_scm.GitWrapper(url_with_branch_ref, self.root_dir,
+                                     self.relpath)
 
-    revinfo = scm.revinfo(None, (), [])
-    self.assertEqual(revinfo, expected_revinfo)
+        expected_file_list = [
+            join(self.base_path, "a"),
+            join(self.base_path, "b"),
+            join(self.base_path, "c")
+        ]
+        file_list = []
+        options.revision = 'unmanaged'
+        scm.update(options, (), file_list)
+
+        self.assertEqual(file_list, expected_file_list)
+        self.assertEqual(scm.revinfo(options, (), None),
+                         '9a51244740b25fa2ded5252ca00a3178d3f665a9')
+        # indicates detached HEAD
+        self.assertEqual(self.getCurrentBranch(), None)
+        self.checkInStdout(
+            'Checked out refs/remotes/origin/feature to a detached HEAD')
+
+    def testUpdateCloneOnTrueRemoteBranch(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+
+        origin_root_dir = self.root_dir
+        self.addCleanup(gclient_utils.rmtree, origin_root_dir)
+
+        self.root_dir = tempfile.mkdtemp()
+        self.relpath = '.'
+        self.base_path = join(self.root_dir, self.relpath)
+        url_with_branch_ref = origin_root_dir + '@refs/heads/feature'
+
+        scm = gclient_scm.GitWrapper(url_with_branch_ref, self.root_dir,
+                                     self.relpath)
 
-    mockRmtree.assert_called_with(self._workdir)
-    mockCheckCallAndFilter.assert_called_with([
-        'cipd', 'describe', 'foo_package',
-        '-log-level', 'error',
-        '-version', 'foo_version',
-        '-json-output', describe_json_path,
-    ])
+        expected_file_list = [
+            join(self.base_path, "a"),
+            join(self.base_path, "b"),
+            join(self.base_path, "c")
+        ]
+        file_list = []
+        options.revision = 'unmanaged'
+        scm.update(options, (), file_list)
+
+        self.assertEqual(file_list, expected_file_list)
+        self.assertEqual(scm.revinfo(options, (), None),
+                         '9a51244740b25fa2ded5252ca00a3178d3f665a9')
+        # @refs/heads/feature is AKA @refs/remotes/origin/feature in the clone,
+        # so should be treated as such by gclient. TODO(mmoss): Though really,
+        # we should only allow DEPS to specify branches as they are known in the
+        # upstream repo, since the mapping into the local repo can be modified
+        # by users (or we might even want to change the gclient defaults at some
+        # point). But that will take more work to stop using refs/remotes/
+        # everywhere that we do (and to stop assuming a DEPS ref will always
+        # resolve locally, like when passing them to show-ref or rev-list).
+        self.assertEqual(self.getCurrentBranch(), None)
+        self.checkInStdout(
+            'Checked out refs/remotes/origin/feature to a detached HEAD')
+
+    def testUpdateUpdate(self):
+        if not self.enabled:
+            return
+        options = self.Options()
+        expected_file_list = []
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, self.relpath)
+        file_list = []
+        options.revision = 'unmanaged'
+        scm.update(options, (), file_list)
+        self.assertEqual(file_list, expected_file_list)
+        self.assertEqual(scm.revinfo(options, (), None),
+                         '069c602044c5388d2d15c3f875b057c852003458')
+        self.checkstdout('________ unmanaged solution; skipping .\n')
 
-  def testUpdate(self):
-    """Checks that update does nothing."""
-    scm = self.createScmWithPackageThatSatisfies(lambda _: True)
-    scm.update(None, (), [])
+
+class CipdWrapperTestCase(unittest.TestCase):
+    def setUp(self):
+        # Create this before setting up mocks.
+        self._cipd_root_dir = tempfile.mkdtemp()
+        self._workdir = tempfile.mkdtemp()
+
+        self._cipd_instance_url = 'https://chrome-infra-packages.appspot.com'
+        self._cipd_root = gclient_scm.CipdRoot(self._cipd_root_dir,
+                                               self._cipd_instance_url)
+        self._cipd_packages = [
+            self._cipd_root.add_package('f', 'foo_package', 'foo_version'),
+            self._cipd_root.add_package('b', 'bar_package', 'bar_version'),
+            self._cipd_root.add_package('b', 'baz_package', 'baz_version'),
+        ]
+        mock.patch('tempfile.mkdtemp', lambda: self._workdir).start()
+        mock.patch('gclient_scm.CipdRoot.add_package').start()
+        mock.patch('gclient_scm.CipdRoot.clobber').start()
+        mock.patch('gclient_scm.CipdRoot.ensure_file_resolve').start()
+        mock.patch('gclient_scm.CipdRoot.ensure').start()
+        self.addCleanup(mock.patch.stopall)
+        self.addCleanup(gclient_utils.rmtree, self._cipd_root_dir)
+        self.addCleanup(gclient_utils.rmtree, self._workdir)
+
+    def createScmWithPackageThatSatisfies(self, condition):
+        return gclient_scm.CipdWrapper(
+            url=self._cipd_instance_url,
+            root_dir=self._cipd_root_dir,
+            relpath='fake_relpath',
+            root=self._cipd_root,
+            package=self.getPackageThatSatisfies(condition))
+
+    def getPackageThatSatisfies(self, condition):
+        for p in self._cipd_packages:
+            if condition(p):
+                return p
+
+        self.fail('Unable to find a satisfactory package.')
+
+    def testRevert(self):
+        """Checks that revert does nothing."""
+        scm = self.createScmWithPackageThatSatisfies(lambda _: True)
+        scm.revert(None, (), [])
+
+    @mock.patch('gclient_scm.gclient_utils.CheckCallAndFilter')
+    @mock.patch('gclient_scm.gclient_utils.rmtree')
+    def testRevinfo(self, mockRmtree, mockCheckCallAndFilter):
+        """Checks that revinfo uses the JSON from cipd describe."""
+        scm = self.createScmWithPackageThatSatisfies(lambda _: True)
+
+        expected_revinfo = '0123456789abcdef0123456789abcdef01234567'
+        json_contents = {
+            'result': {
+                'pin': {
+                    'instance_id': expected_revinfo,
+                }
+            }
+        }
+        describe_json_path = join(self._workdir, 'describe.json')
+        with open(describe_json_path, 'w') as describe_json:
+            json.dump(json_contents, describe_json)
+
+        revinfo = scm.revinfo(None, (), [])
+        self.assertEqual(revinfo, expected_revinfo)
+
+        mockRmtree.assert_called_with(self._workdir)
+        mockCheckCallAndFilter.assert_called_with([
+            'cipd',
+            'describe',
+            'foo_package',
+            '-log-level',
+            'error',
+            '-version',
+            'foo_version',
+            '-json-output',
+            describe_json_path,
+        ])
+
+    def testUpdate(self):
+        """Checks that update does nothing."""
+        scm = self.createScmWithPackageThatSatisfies(lambda _: True)
+        scm.update(None, (), [])
 
 
 class BranchHeadsFakeRepo(fake_repos.FakeReposBase):
-  def populateGit(self):
-    # Creates a tree that looks like this:
-    #
-    #    5 refs/branch-heads/5
-    #    |
-    #    4
-    #    |
-    # 1--2--3 refs/heads/main
-    self._commit_git('repo_1', {'commit 1': 'touched'})
-    self._commit_git('repo_1', {'commit 2': 'touched'})
-    self._commit_git('repo_1', {'commit 3': 'touched'})
-    self._create_ref('repo_1', 'refs/heads/main', 3)
-
-    self._commit_git('repo_1', {'commit 4': 'touched'}, base=2)
-    self._commit_git('repo_1', {'commit 5': 'touched'}, base=2)
-    self._create_ref('repo_1', 'refs/branch-heads/5', 5)
+    def populateGit(self):
+        # Creates a tree that looks like this:
+        #
+        #    5 refs/branch-heads/5
+        #    |
+        #    4
+        #    |
+        # 1--2--3 refs/heads/main
+        self._commit_git('repo_1', {'commit 1': 'touched'})
+        self._commit_git('repo_1', {'commit 2': 'touched'})
+        self._commit_git('repo_1', {'commit 3': 'touched'})
+        self._create_ref('repo_1', 'refs/heads/main', 3)
+
+        self._commit_git('repo_1', {'commit 4': 'touched'}, base=2)
+        self._commit_git('repo_1', {'commit 5': 'touched'}, base=2)
+        self._create_ref('repo_1', 'refs/branch-heads/5', 5)
 
 
 class BranchHeadsTest(fake_repos.FakeReposTestBase):
-  FAKE_REPOS_CLASS = BranchHeadsFakeRepo
-
-  def setUp(self):
-    super(BranchHeadsTest, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    self.options = BaseGitWrapperTestCase.OptionsObject()
-    self.url = self.git_base + 'repo_1'
-    self.mirror = None
-    mock.patch('sys.stdout', StringIO()).start()
-    self.addCleanup(mock.patch.stopall)
-
-  def setUpMirror(self):
-    self.mirror = tempfile.mkdtemp('mirror')
-    git_cache.Mirror.SetCachePath(self.mirror)
-    self.addCleanup(gclient_utils.rmtree, self.mirror)
-    self.addCleanup(git_cache.Mirror.SetCachePath, None)
-
-  def testCheckoutBranchHeads(self):
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    self.options.revision = 'refs/branch-heads/5'
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
-
-  def testCheckoutUpdatedBranchHeads(self):
-    # Travel back in time, and set refs/branch-heads/5 to its parent.
-    subprocess2.check_call(
-        ['git', 'update-ref', 'refs/branch-heads/5', self.githash('repo_1', 4)],
-        cwd=self.url)
-
-    # Sync to refs/branch-heads/5
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    self.options.revision = 'refs/branch-heads/5'
-    scm.update(self.options, None, [])
-
-    # Set refs/branch-heads/5 back to its original value.
-    subprocess2.check_call(
-        ['git', 'update-ref', 'refs/branch-heads/5', self.githash('repo_1', 5)],
-        cwd=self.url)
-
-    # Attempt to sync to refs/branch-heads/5 again.
-    self.testCheckoutBranchHeads()
-
-  def testCheckoutBranchHeadsMirror(self):
-    self.setUpMirror()
-    self.testCheckoutBranchHeads()
-
-  def testCheckoutUpdatedBranchHeadsMirror(self):
-    self.setUpMirror()
-    self.testCheckoutUpdatedBranchHeads()
+    FAKE_REPOS_CLASS = BranchHeadsFakeRepo
+
+    def setUp(self):
+        super(BranchHeadsTest, self).setUp()
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        self.options = BaseGitWrapperTestCase.OptionsObject()
+        self.url = self.git_base + 'repo_1'
+        self.mirror = None
+        mock.patch('sys.stdout', StringIO()).start()
+        self.addCleanup(mock.patch.stopall)
+
+    def setUpMirror(self):
+        self.mirror = tempfile.mkdtemp('mirror')
+        git_cache.Mirror.SetCachePath(self.mirror)
+        self.addCleanup(gclient_utils.rmtree, self.mirror)
+        self.addCleanup(git_cache.Mirror.SetCachePath, None)
+
+    def testCheckoutBranchHeads(self):
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        self.options.revision = 'refs/branch-heads/5'
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 5),
+                         self.gitrevparse(self.root_dir))
+
+    def testCheckoutUpdatedBranchHeads(self):
+        # Travel back in time, and set refs/branch-heads/5 to its parent.
+        subprocess2.check_call([
+            'git', 'update-ref', 'refs/branch-heads/5',
+            self.githash('repo_1', 4)
+        ],
+                               cwd=self.url)
+
+        # Sync to refs/branch-heads/5
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        self.options.revision = 'refs/branch-heads/5'
+        scm.update(self.options, None, [])
+
+        # Set refs/branch-heads/5 back to its original value.
+        subprocess2.check_call([
+            'git', 'update-ref', 'refs/branch-heads/5',
+            self.githash('repo_1', 5)
+        ],
+                               cwd=self.url)
+
+        # Attempt to sync to refs/branch-heads/5 again.
+        self.testCheckoutBranchHeads()
+
+    def testCheckoutBranchHeadsMirror(self):
+        self.setUpMirror()
+        self.testCheckoutBranchHeads()
+
+    def testCheckoutUpdatedBranchHeadsMirror(self):
+        self.setUpMirror()
+        self.testCheckoutUpdatedBranchHeads()
 
 
 class GerritChangesFakeRepo(fake_repos.FakeReposBase):
-  def populateGit(self):
-    # Creates a tree that looks like this:
-    #
-    #       6 refs/changes/35/1235/1
-    #       |
-    #       5 refs/changes/34/1234/1
-    #       |
-    # 1--2--3--4 refs/heads/main
-    #    |  |
-    #    |  11(5)--12 refs/heads/main-with-5
-    #    |
-    #    7--8--9 refs/heads/feature
-    #       |
-    #       10 refs/changes/36/1236/1
-    #
-
-    self._commit_git('repo_1', {'commit 1': 'touched'})
-    self._commit_git('repo_1', {'commit 2': 'touched'})
-    self._commit_git('repo_1', {'commit 3': 'touched'})
-    self._commit_git('repo_1', {'commit 4': 'touched'})
-    self._create_ref('repo_1', 'refs/heads/main', 4)
-
-    # Create a change on top of commit 3 that consists of two commits.
-    self._commit_git('repo_1',
-                     {'commit 5': 'touched',
-                      'change': '1234'},
-                     base=3)
-    self._create_ref('repo_1', 'refs/changes/34/1234/1', 5)
-    self._commit_git('repo_1',
-                     {'commit 6': 'touched',
-                      'change': '1235'})
-    self._create_ref('repo_1', 'refs/changes/35/1235/1', 6)
-
-    # Create a refs/heads/feature branch on top of commit 2, consisting of three
-    # commits.
-    self._commit_git('repo_1', {'commit 7': 'touched'}, base=2)
-    self._commit_git('repo_1', {'commit 8': 'touched'})
-    self._commit_git('repo_1', {'commit 9': 'touched'})
-    self._create_ref('repo_1', 'refs/heads/feature', 9)
-
-    # Create a change of top of commit 8.
-    self._commit_git('repo_1',
-                     {'commit 10': 'touched',
-                      'change': '1236'},
-                     base=8)
-    self._create_ref('repo_1', 'refs/changes/36/1236/1', 10)
-
-    # Create a refs/heads/main-with-5 on top of commit 3 which is a branch
-    # where refs/changes/34/1234/1 (commit 5) has already landed as commit 11.
-    self._commit_git('repo_1',
-                     # This is really commit 11, but has the changes of commit 5
-                     {'commit 5': 'touched',
-                      'change': '1234'},
-                     base=3)
-    self._commit_git('repo_1', {'commit 12': 'touched'})
-    self._create_ref('repo_1', 'refs/heads/main-with-5', 12)
+    def populateGit(self):
+        # Creates a tree that looks like this:
+        #
+        #       6 refs/changes/35/1235/1
+        #       |
+        #       5 refs/changes/34/1234/1
+        #       |
+        # 1--2--3--4 refs/heads/main
+        #    |  |
+        #    |  11(5)--12 refs/heads/main-with-5
+        #    |
+        #    7--8--9 refs/heads/feature
+        #       |
+        #       10 refs/changes/36/1236/1
+        #
+
+        self._commit_git('repo_1', {'commit 1': 'touched'})
+        self._commit_git('repo_1', {'commit 2': 'touched'})
+        self._commit_git('repo_1', {'commit 3': 'touched'})
+        self._commit_git('repo_1', {'commit 4': 'touched'})
+        self._create_ref('repo_1', 'refs/heads/main', 4)
+
+        # Create a change on top of commit 3 that consists of two commits.
+        self._commit_git('repo_1', {
+            'commit 5': 'touched',
+            'change': '1234'
+        },
+                         base=3)
+        self._create_ref('repo_1', 'refs/changes/34/1234/1', 5)
+        self._commit_git('repo_1', {'commit 6': 'touched', 'change': '1235'})
+        self._create_ref('repo_1', 'refs/changes/35/1235/1', 6)
+
+        # Create a refs/heads/feature branch on top of commit 2, consisting of
+        # three commits.
+        self._commit_git('repo_1', {'commit 7': 'touched'}, base=2)
+        self._commit_git('repo_1', {'commit 8': 'touched'})
+        self._commit_git('repo_1', {'commit 9': 'touched'})
+        self._create_ref('repo_1', 'refs/heads/feature', 9)
+
+        # Create a change on top of commit 8.
+        self._commit_git('repo_1', {
+            'commit 10': 'touched',
+            'change': '1236'
+        },
+                         base=8)
+        self._create_ref('repo_1', 'refs/changes/36/1236/1', 10)
+
+        # Create a refs/heads/main-with-5 on top of commit 3 which is a branch
+        # where refs/changes/34/1234/1 (commit 5) has already landed as commit
+        # 11.
+        self._commit_git(
+            'repo_1',
+            # This is really commit 11, but has the changes of commit 5
+            {
+                'commit 5': 'touched',
+                'change': '1234'
+            },
+            base=3)
+        self._commit_git('repo_1', {'commit 12': 'touched'})
+        self._create_ref('repo_1', 'refs/heads/main-with-5', 12)
 
 
 class GerritChangesTest(fake_repos.FakeReposTestBase):
-  FAKE_REPOS_CLASS = GerritChangesFakeRepo
-
-  def setUp(self):
-    super(GerritChangesTest, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    self.options = BaseGitWrapperTestCase.OptionsObject()
-    self.url = self.git_base + 'repo_1'
-    self.mirror = None
-    mock.patch('sys.stdout', StringIO()).start()
-    self.addCleanup(mock.patch.stopall)
-
-  def setUpMirror(self):
-    self.mirror = tempfile.mkdtemp()
-    git_cache.Mirror.SetCachePath(self.mirror)
-    self.addCleanup(gclient_utils.rmtree, self.mirror)
-    self.addCleanup(git_cache.Mirror.SetCachePath, None)
-
-  def assertCommits(self, commits):
-    """Check that all, and only |commits| are present in the current checkout.
+    FAKE_REPOS_CLASS = GerritChangesFakeRepo
+
+    def setUp(self):
+        super(GerritChangesTest, self).setUp()
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        self.options = BaseGitWrapperTestCase.OptionsObject()
+        self.url = self.git_base + 'repo_1'
+        self.mirror = None
+        mock.patch('sys.stdout', StringIO()).start()
+        self.addCleanup(mock.patch.stopall)
+
+    def setUpMirror(self):
+        self.mirror = tempfile.mkdtemp()
+        git_cache.Mirror.SetCachePath(self.mirror)
+        self.addCleanup(gclient_utils.rmtree, self.mirror)
+        self.addCleanup(git_cache.Mirror.SetCachePath, None)
+
+    def assertCommits(self, commits):
+        """Check that all, and only |commits| are present in the current checkout.
     """
-    for i in commits:
-      name = os.path.join(self.root_dir, 'commit ' + str(i))
-      self.assertTrue(os.path.exists(name), 'Commit not found: %s' % name)
-
-    all_commits = set(range(1, len(self.FAKE_REPOS.git_hashes['repo_1'])))
-    for i in all_commits - set(commits):
-      name = os.path.join(self.root_dir, 'commit ' + str(i))
-      self.assertFalse(os.path.exists(name), 'Unexpected commit: %s' % name)
-
-  def testCanCloneGerritChange(self):
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    self.options.revision = 'refs/changes/35/1235/1'
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 6), self.gitrevparse(self.root_dir))
-
-  def testCanSyncToGerritChange(self):
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    self.options.revision = self.githash('repo_1', 1)
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 1), self.gitrevparse(self.root_dir))
-
-    self.options.revision = 'refs/changes/35/1235/1'
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 6), self.gitrevparse(self.root_dir))
-
-  def testCanCloneGerritChangeMirror(self):
-    self.setUpMirror()
-    self.testCanCloneGerritChange()
-
-  def testCanSyncToGerritChangeMirror(self):
-    self.setUpMirror()
-    self.testCanSyncToGerritChange()
-
-  def testMirrorPushUrl(self):
-    self.setUpMirror()
-
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-    self.assertIsNotNone(scm._GetMirror(self.url, self.options))
-
-    scm.update(self.options, None, file_list)
-
-    fetch_url = scm._Capture(['remote', 'get-url', 'origin'])
-    self.assertTrue(
-        fetch_url.startswith(self.mirror),
-        msg='\n'.join([
-            'Repository fetch url should be in the git cache mirror directory.',
-            '  fetch_url: %s' % fetch_url,
-            '  mirror:    %s' % self.mirror]))
-    push_url = scm._Capture(['remote', 'get-url', '--push', 'origin'])
-    self.assertEqual(push_url, self.url)
-
-  def testAppliesPatchOnTopOfMasterByDefault(self):
-    """Test the default case, where we apply a patch on top of main."""
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    # Make sure we don't specify a revision.
-    self.options.revision = None
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 4), self.gitrevparse(self.root_dir))
-
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/35/1235/1', 'refs/heads/main', self.options,
-        file_list)
-
-    self.assertCommits([1, 2, 3, 4, 5, 6])
-    self.assertEqual(self.githash('repo_1', 4), self.gitrevparse(self.root_dir))
-
-  def testCheckoutOlderThanPatchBase(self):
-    """Test applying a patch on an old checkout.
+        for i in commits:
+            name = os.path.join(self.root_dir, 'commit ' + str(i))
+            self.assertTrue(os.path.exists(name), 'Commit not found: %s' % name)
+
+        all_commits = set(range(1, len(self.FAKE_REPOS.git_hashes['repo_1'])))
+        for i in all_commits - set(commits):
+            name = os.path.join(self.root_dir, 'commit ' + str(i))
+            self.assertFalse(os.path.exists(name),
+                             'Unexpected commit: %s' % name)
+
+    def testCanCloneGerritChange(self):
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        self.options.revision = 'refs/changes/35/1235/1'
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 6),
+                         self.gitrevparse(self.root_dir))
+
+    def testCanSyncToGerritChange(self):
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        self.options.revision = self.githash('repo_1', 1)
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 1),
+                         self.gitrevparse(self.root_dir))
+
+        self.options.revision = 'refs/changes/35/1235/1'
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 6),
+                         self.gitrevparse(self.root_dir))
+
+    def testCanCloneGerritChangeMirror(self):
+        self.setUpMirror()
+        self.testCanCloneGerritChange()
+
+    def testCanSyncToGerritChangeMirror(self):
+        self.setUpMirror()
+        self.testCanSyncToGerritChange()
+
+    def testMirrorPushUrl(self):
+        self.setUpMirror()
+
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+        self.assertIsNotNone(scm._GetMirror(self.url, self.options))
+
+        scm.update(self.options, None, file_list)
+
+        fetch_url = scm._Capture(['remote', 'get-url', 'origin'])
+        self.assertTrue(
+            fetch_url.startswith(self.mirror),
+            msg='\n'.join([
+                'Repository fetch url should be in the git cache mirror directory.',
+                '  fetch_url: %s' % fetch_url,
+                '  mirror:    %s' % self.mirror
+            ]))
+        push_url = scm._Capture(['remote', 'get-url', '--push', 'origin'])
+        self.assertEqual(push_url, self.url)
+
+    def testAppliesPatchOnTopOfMasterByDefault(self):
+        """Test the default case, where we apply a patch on top of main."""
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        # Make sure we don't specify a revision.
+        self.options.revision = None
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 4),
+                         self.gitrevparse(self.root_dir))
+
+        scm.apply_patch_ref(self.url, 'refs/changes/35/1235/1',
+                            'refs/heads/main', self.options, file_list)
+
+        self.assertCommits([1, 2, 3, 4, 5, 6])
+        self.assertEqual(self.githash('repo_1', 4),
+                         self.gitrevparse(self.root_dir))
+
+    def testCheckoutOlderThanPatchBase(self):
+        """Test applying a patch on an old checkout.
 
     We first checkout commit 1, and try to patch refs/changes/35/1235/1, which
     contains commits 5 and 6, and is based on top of commit 3.
     The final result should contain commits 1, 5 and 6, but not commits 2 or 3.
     """
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    # Sync to commit 1
-    self.options.revision = self.githash('repo_1', 1)
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 1), self.gitrevparse(self.root_dir))
-
-    # Apply the change on top of that.
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/35/1235/1', 'refs/heads/main', self.options,
-        file_list)
-
-    self.assertCommits([1, 5, 6])
-    self.assertEqual(self.githash('repo_1', 1), self.gitrevparse(self.root_dir))
-
-  def testCheckoutOriginFeature(self):
-    """Tests that we can apply a patch on a branch other than main."""
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    # Sync to remote's refs/heads/feature
-    self.options.revision = 'refs/heads/feature'
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 9), self.gitrevparse(self.root_dir))
-
-    # Apply the change on top of that.
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/36/1236/1', 'refs/heads/feature', self.options,
-        file_list)
-
-    self.assertCommits([1, 2, 7, 8, 9, 10])
-    self.assertEqual(self.githash('repo_1', 9), self.gitrevparse(self.root_dir))
-
-  def testCheckoutOriginFeatureOnOldRevision(self):
-    """Tests that we can apply a patch on an old checkout, on a branch other
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        # Sync to commit 1
+        self.options.revision = self.githash('repo_1', 1)
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 1),
+                         self.gitrevparse(self.root_dir))
+
+        # Apply the change on top of that.
+        scm.apply_patch_ref(self.url, 'refs/changes/35/1235/1',
+                            'refs/heads/main', self.options, file_list)
+
+        self.assertCommits([1, 5, 6])
+        self.assertEqual(self.githash('repo_1', 1),
+                         self.gitrevparse(self.root_dir))
+
+    def testCheckoutOriginFeature(self):
+        """Tests that we can apply a patch on a branch other than main."""
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        # Sync to remote's refs/heads/feature
+        self.options.revision = 'refs/heads/feature'
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 9),
+                         self.gitrevparse(self.root_dir))
+
+        # Apply the change on top of that.
+        scm.apply_patch_ref(self.url, 'refs/changes/36/1236/1',
+                            'refs/heads/feature', self.options, file_list)
+
+        self.assertCommits([1, 2, 7, 8, 9, 10])
+        self.assertEqual(self.githash('repo_1', 9),
+                         self.gitrevparse(self.root_dir))
+
+    def testCheckoutOriginFeatureOnOldRevision(self):
+        """Tests that we can apply a patch on an old checkout, on a branch other
     than main."""
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    # Sync to remote's refs/heads/feature on an old revision
-    self.options.revision = self.githash('repo_1', 7)
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 7), self.gitrevparse(self.root_dir))
-
-    # Apply the change on top of that.
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/36/1236/1', 'refs/heads/feature', self.options,
-        file_list)
-
-    # We shouldn't have rebased on top of 2 (which is the merge base between
-    # remote's main branch and the change) but on top of 7 (which is the
-    # merge base between remote's feature branch and the change).
-    self.assertCommits([1, 2, 7, 10])
-    self.assertEqual(self.githash('repo_1', 7), self.gitrevparse(self.root_dir))
-
-  def testCheckoutOriginFeaturePatchBranch(self):
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    # Sync to the hash instead of remote's refs/heads/feature.
-    self.options.revision = self.githash('repo_1', 9)
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 9), self.gitrevparse(self.root_dir))
-
-    # Apply refs/changes/34/1234/1, created for remote's main branch on top of
-    # remote's feature branch.
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/35/1235/1', 'refs/heads/main', self.options,
-        file_list)
-
-    # Commits 5 and 6 are part of the patch, and commits 1, 2, 7, 8 and 9 are
-    # part of remote's feature branch.
-    self.assertCommits([1, 2, 5, 6, 7, 8, 9])
-    self.assertEqual(self.githash('repo_1', 9), self.gitrevparse(self.root_dir))
-
-  def testDoesntRebasePatchMaster(self):
-    """Tests that we can apply a patch without rebasing it.
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        # Sync to remote's refs/heads/feature on an old revision
+        self.options.revision = self.githash('repo_1', 7)
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 7),
+                         self.gitrevparse(self.root_dir))
+
+        # Apply the change on top of that.
+        scm.apply_patch_ref(self.url, 'refs/changes/36/1236/1',
+                            'refs/heads/feature', self.options, file_list)
+
+        # We shouldn't have rebased on top of 2 (which is the merge base between
+        # remote's main branch and the change) but on top of 7 (which is the
+        # merge base between remote's feature branch and the change).
+        self.assertCommits([1, 2, 7, 10])
+        self.assertEqual(self.githash('repo_1', 7),
+                         self.gitrevparse(self.root_dir))
+
+    def testCheckoutOriginFeaturePatchBranch(self):
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        # Sync to the hash instead of remote's refs/heads/feature.
+        self.options.revision = self.githash('repo_1', 9)
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 9),
+                         self.gitrevparse(self.root_dir))
+
+        # Apply refs/changes/35/1235/1, created for remote's main branch on top
+        # of remote's feature branch.
+        scm.apply_patch_ref(self.url, 'refs/changes/35/1235/1',
+                            'refs/heads/main', self.options, file_list)
+
+        # Commits 5 and 6 are part of the patch, and commits 1, 2, 7, 8 and 9
+        # are part of remote's feature branch.
+        self.assertCommits([1, 2, 5, 6, 7, 8, 9])
+        self.assertEqual(self.githash('repo_1', 9),
+                         self.gitrevparse(self.root_dir))
+
+    def testDoesntRebasePatchMaster(self):
+        """Tests that we can apply a patch without rebasing it.
     """
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
 
-    self.options.rebase_patch_ref = False
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 4), self.gitrevparse(self.root_dir))
+        self.options.rebase_patch_ref = False
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 4),
+                         self.gitrevparse(self.root_dir))
 
-    # Apply the change on top of that.
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/35/1235/1', 'refs/heads/main', self.options,
-        file_list)
+        # Apply the change on top of that.
+        scm.apply_patch_ref(self.url, 'refs/changes/35/1235/1',
+                            'refs/heads/main', self.options, file_list)
 
-    self.assertCommits([1, 2, 3, 5, 6])
-    self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
+        self.assertCommits([1, 2, 3, 5, 6])
+        self.assertEqual(self.githash('repo_1', 5),
+                         self.gitrevparse(self.root_dir))
 
-  def testDoesntRebasePatchOldCheckout(self):
-    """Tests that we can apply a patch without rebasing it on an old checkout.
+    def testDoesntRebasePatchOldCheckout(self):
+        """Tests that we can apply a patch without rebasing it on an old checkout.
     """
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    # Sync to commit 1
-    self.options.revision = self.githash('repo_1', 1)
-    self.options.rebase_patch_ref = False
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 1), self.gitrevparse(self.root_dir))
-
-    # Apply the change on top of that.
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/35/1235/1', 'refs/heads/main', self.options,
-        file_list)
-
-    self.assertCommits([1, 2, 3, 5, 6])
-    self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
-
-  def testDoesntSoftResetIfNotAskedTo(self):
-    """Test that we can apply a patch without doing a soft reset."""
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    self.options.reset_patch_ref = False
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 4), self.gitrevparse(self.root_dir))
-
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/35/1235/1', 'refs/heads/main', self.options,
-        file_list)
-
-    self.assertCommits([1, 2, 3, 4, 5, 6])
-    # The commit hash after cherry-picking is not known, but it must be
-    # different from what the repo was synced at before patching.
-    self.assertNotEqual(self.githash('repo_1', 4),
-                        self.gitrevparse(self.root_dir))
-
-  @mock.patch('gerrit_util.GetChange', return_value={'topic': 'test_topic'})
-  @mock.patch('gerrit_util.QueryChanges', return_value=[
-      {'_number': 1234},
-      {'_number': 1235, 'current_revision': 'abc',
-       'revisions': {'abc': {'ref': 'refs/changes/35/1235/1'}}}])
-  def testDownloadTopics(self, query_changes_mock, get_change_mock):
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    self.options.revision = 'refs/changes/34/1234/1'
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
-
-    # pylint: disable=attribute-defined-outside-init
-    self.options.download_topics = True
-    scm.url = 'https://test-repo.googlesource.com/repo_1.git'
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/34/1234/1', 'refs/heads/main', self.options,
-        file_list)
-
-    get_change_mock.assert_called_once_with(
-        mock.ANY, '1234')
-    query_changes_mock.assert_called_once_with(
-        mock.ANY,
-        [('topic', 'test_topic'), ('status', 'open'), ('repo', 'repo_1')],
-        o_params=['ALL_REVISIONS'])
-
-    self.assertCommits([1, 2, 3, 5, 6])
-    # The commit hash after the two cherry-picks is not known, but it must be
-    # different from what the repo was synced at before patching.
-    self.assertNotEqual(self.githash('repo_1', 4),
-                        self.gitrevparse(self.root_dir))
-
-  def testRecoversAfterPatchFailure(self):
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    self.options.revision = 'refs/changes/34/1234/1'
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
-
-    # Checkout 'refs/changes/34/1234/1' modifies the 'change' file, so trying to
-    # patch 'refs/changes/36/1236/1' creates a patch failure.
-    with self.assertRaises(subprocess2.CalledProcessError) as cm:
-      scm.apply_patch_ref(
-          self.url, 'refs/changes/36/1236/1', 'refs/heads/main', self.options,
-          file_list)
-    self.assertEqual(cm.exception.cmd[:2], ['git', 'cherry-pick'])
-    self.assertIn(b'error: could not apply', cm.exception.stderr)
-
-    # Try to apply 'refs/changes/35/1235/1', which doesn't have a merge
-    # conflict.
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/35/1235/1', 'refs/heads/main', self.options,
-        file_list)
-    self.assertCommits([1, 2, 3, 5, 6])
-    self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
-
-  def testIgnoresAlreadyMergedCommits(self):
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    self.options.revision = 'refs/heads/main-with-5'
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 12),
-                     self.gitrevparse(self.root_dir))
-
-    # When we try 'refs/changes/35/1235/1' on top of 'refs/heads/feature',
-    # 'refs/changes/34/1234/1' will be an empty commit, since the changes were
-    # already present in the tree as commit 11.
-    # Make sure we deal with this gracefully.
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/35/1235/1', 'refs/heads/feature', self.options,
-        file_list)
-    self.assertCommits([1, 2, 3, 5, 6, 12])
-    self.assertEqual(self.githash('repo_1', 12),
-                     self.gitrevparse(self.root_dir))
-
-  def testRecoversFromExistingCherryPick(self):
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    self.options.revision = 'refs/changes/34/1234/1'
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
-
-    # Checkout 'refs/changes/34/1234/1' modifies the 'change' file, so trying to
-    # cherry-pick 'refs/changes/36/1236/1' raises an error.
-    scm._Run(['fetch', 'origin', 'refs/changes/36/1236/1'], self.options)
-    with self.assertRaises(subprocess2.CalledProcessError) as cm:
-      scm._Run(['cherry-pick', 'FETCH_HEAD'], self.options)
-    self.assertEqual(cm.exception.cmd[:2], ['git', 'cherry-pick'])
-
-    # Try to apply 'refs/changes/35/1235/1', which doesn't have a merge
-    # conflict.
-    scm.apply_patch_ref(
-        self.url, 'refs/changes/35/1235/1', 'refs/heads/main', self.options,
-        file_list)
-    self.assertCommits([1, 2, 3, 5, 6])
-    self.assertEqual(self.githash('repo_1', 5), self.gitrevparse(self.root_dir))
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        # Sync to commit 1
+        self.options.revision = self.githash('repo_1', 1)
+        self.options.rebase_patch_ref = False
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 1),
+                         self.gitrevparse(self.root_dir))
+
+        # Apply the change on top of that.
+        scm.apply_patch_ref(self.url, 'refs/changes/35/1235/1',
+                            'refs/heads/main', self.options, file_list)
+
+        self.assertCommits([1, 2, 3, 5, 6])
+        self.assertEqual(self.githash('repo_1', 5),
+                         self.gitrevparse(self.root_dir))
+
+    def testDoesntSoftResetIfNotAskedTo(self):
+        """Test that we can apply a patch without doing a soft reset."""
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        self.options.reset_patch_ref = False
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 4),
+                         self.gitrevparse(self.root_dir))
+
+        scm.apply_patch_ref(self.url, 'refs/changes/35/1235/1',
+                            'refs/heads/main', self.options, file_list)
+
+        self.assertCommits([1, 2, 3, 4, 5, 6])
+        # The commit hash after cherry-picking is not known, but it must be
+        # different from what the repo was synced at before patching.
+        self.assertNotEqual(self.githash('repo_1', 4),
+                            self.gitrevparse(self.root_dir))
+
+    @mock.patch('gerrit_util.GetChange', return_value={'topic': 'test_topic'})
+    @mock.patch('gerrit_util.QueryChanges',
+                return_value=[{
+                    '_number': 1234
+                }, {
+                    '_number': 1235,
+                    'current_revision': 'abc',
+                    'revisions': {
+                        'abc': {
+                            'ref': 'refs/changes/35/1235/1'
+                        }
+                    }
+                }])
+    def testDownloadTopics(self, query_changes_mock, get_change_mock):
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        self.options.revision = 'refs/changes/34/1234/1'
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 5),
+                         self.gitrevparse(self.root_dir))
+
+        # pylint: disable=attribute-defined-outside-init
+        self.options.download_topics = True
+        scm.url = 'https://test-repo.googlesource.com/repo_1.git'
+        scm.apply_patch_ref(self.url, 'refs/changes/34/1234/1',
+                            'refs/heads/main', self.options, file_list)
+
+        get_change_mock.assert_called_once_with(mock.ANY, '1234')
+        query_changes_mock.assert_called_once_with(mock.ANY,
+                                                   [('topic', 'test_topic'),
+                                                    ('status', 'open'),
+                                                    ('repo', 'repo_1')],
+                                                   o_params=['ALL_REVISIONS'])
+
+        self.assertCommits([1, 2, 3, 5, 6])
+        # The commit hash after the two cherry-picks is not known, but it must
+        # be different from what the repo was synced at before patching.
+        self.assertNotEqual(self.githash('repo_1', 4),
+                            self.gitrevparse(self.root_dir))
+
+    def testRecoversAfterPatchFailure(self):
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        self.options.revision = 'refs/changes/34/1234/1'
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 5),
+                         self.gitrevparse(self.root_dir))
+
+        # Checkout 'refs/changes/34/1234/1' modifies the 'change' file, so
+        # trying to patch 'refs/changes/36/1236/1' creates a patch failure.
+        with self.assertRaises(subprocess2.CalledProcessError) as cm:
+            scm.apply_patch_ref(self.url, 'refs/changes/36/1236/1',
+                                'refs/heads/main', self.options, file_list)
+        self.assertEqual(cm.exception.cmd[:2], ['git', 'cherry-pick'])
+        self.assertIn(b'error: could not apply', cm.exception.stderr)
+
+        # Try to apply 'refs/changes/35/1235/1', which doesn't have a merge
+        # conflict.
+        scm.apply_patch_ref(self.url, 'refs/changes/35/1235/1',
+                            'refs/heads/main', self.options, file_list)
+        self.assertCommits([1, 2, 3, 5, 6])
+        self.assertEqual(self.githash('repo_1', 5),
+                         self.gitrevparse(self.root_dir))
+
+    def testIgnoresAlreadyMergedCommits(self):
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        self.options.revision = 'refs/heads/main-with-5'
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 12),
+                         self.gitrevparse(self.root_dir))
+
+        # When we try 'refs/changes/35/1235/1' on top of 'refs/heads/feature',
+        # 'refs/changes/34/1234/1' will be an empty commit, since the changes
+        # were already present in the tree as commit 11. Make sure we deal with
+        # this gracefully.
+        scm.apply_patch_ref(self.url, 'refs/changes/35/1235/1',
+                            'refs/heads/feature', self.options, file_list)
+        self.assertCommits([1, 2, 3, 5, 6, 12])
+        self.assertEqual(self.githash('repo_1', 12),
+                         self.gitrevparse(self.root_dir))
+
+    def testRecoversFromExistingCherryPick(self):
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        self.options.revision = 'refs/changes/34/1234/1'
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 5),
+                         self.gitrevparse(self.root_dir))
+
+        # Checkout 'refs/changes/34/1234/1' modifies the 'change' file, so
+        # trying to cherry-pick 'refs/changes/36/1236/1' raises an error.
+        scm._Run(['fetch', 'origin', 'refs/changes/36/1236/1'], self.options)
+        with self.assertRaises(subprocess2.CalledProcessError) as cm:
+            scm._Run(['cherry-pick', 'FETCH_HEAD'], self.options)
+        self.assertEqual(cm.exception.cmd[:2], ['git', 'cherry-pick'])
+
+        # Try to apply 'refs/changes/35/1235/1', which doesn't have a merge
+        # conflict.
+        scm.apply_patch_ref(self.url, 'refs/changes/35/1235/1',
+                            'refs/heads/main', self.options, file_list)
+        self.assertCommits([1, 2, 3, 5, 6])
+        self.assertEqual(self.githash('repo_1', 5),
+                         self.gitrevparse(self.root_dir))
 
 
 class DepsChangesFakeRepo(fake_repos.FakeReposBase):
-  def populateGit(self):
-    self._commit_git('repo_1', {'DEPS': 'versionA', 'doesnotmatter': 'B'})
-    self._commit_git('repo_1', {'DEPS': 'versionA', 'doesnotmatter': 'C'})
+    def populateGit(self):
+        self._commit_git('repo_1', {'DEPS': 'versionA', 'doesnotmatter': 'B'})
+        self._commit_git('repo_1', {'DEPS': 'versionA', 'doesnotmatter': 'C'})
 
-    self._commit_git('repo_1', {'DEPS': 'versionB'})
-    self._commit_git('repo_1', {'DEPS': 'versionA', 'doesnotmatter': 'C'})
-    self._create_ref('repo_1', 'refs/heads/main', 4)
+        self._commit_git('repo_1', {'DEPS': 'versionB'})
+        self._commit_git('repo_1', {'DEPS': 'versionA', 'doesnotmatter': 'C'})
+        self._create_ref('repo_1', 'refs/heads/main', 4)
 
 
 class CheckDiffTest(fake_repos.FakeReposTestBase):
-  FAKE_REPOS_CLASS = DepsChangesFakeRepo
-
-  def setUp(self):
-    super(CheckDiffTest, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    self.options = BaseGitWrapperTestCase.OptionsObject()
-    self.url = self.git_base + 'repo_1'
-    self.mirror = None
-    mock.patch('sys.stdout', StringIO()).start()
-    self.addCleanup(mock.patch.stopall)
-
-  def setUpMirror(self):
-    self.mirror = tempfile.mkdtemp()
-    git_cache.Mirror.SetCachePath(self.mirror)
-    self.addCleanup(gclient_utils.rmtree, self.mirror)
-    self.addCleanup(git_cache.Mirror.SetCachePath, None)
-
-  def testCheckDiff(self):
-    """Correctly check for diffs."""
-    scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
-    file_list = []
-
-    # Make sure we don't specify a revision.
-    self.options.revision = None
-    scm.update(self.options, None, file_list)
-    self.assertEqual(self.githash('repo_1', 4), self.gitrevparse(self.root_dir))
-
-    self.assertFalse(scm.check_diff(self.githash('repo_1', 1), files=['DEPS']))
-    self.assertTrue(scm.check_diff(self.githash('repo_1', 1)))
-    self.assertTrue(scm.check_diff(self.githash('repo_1', 3), files=['DEPS']))
-
-    self.assertFalse(
-        scm.check_diff(self.githash('repo_1', 2),
-                       files=['DEPS', 'doesnotmatter']))
-    self.assertFalse(scm.check_diff(self.githash('repo_1', 2)))
+    FAKE_REPOS_CLASS = DepsChangesFakeRepo
+
+    def setUp(self):
+        super(CheckDiffTest, self).setUp()
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        self.options = BaseGitWrapperTestCase.OptionsObject()
+        self.url = self.git_base + 'repo_1'
+        self.mirror = None
+        mock.patch('sys.stdout', StringIO()).start()
+        self.addCleanup(mock.patch.stopall)
+
+    def setUpMirror(self):
+        self.mirror = tempfile.mkdtemp()
+        git_cache.Mirror.SetCachePath(self.mirror)
+        self.addCleanup(gclient_utils.rmtree, self.mirror)
+        self.addCleanup(git_cache.Mirror.SetCachePath, None)
+
+    def testCheckDiff(self):
+        """Correctly check for diffs."""
+        scm = gclient_scm.GitWrapper(self.url, self.root_dir, '.')
+        file_list = []
+
+        # Make sure we don't specify a revision.
+        self.options.revision = None
+        scm.update(self.options, None, file_list)
+        self.assertEqual(self.githash('repo_1', 4),
+                         self.gitrevparse(self.root_dir))
+
+        self.assertFalse(
+            scm.check_diff(self.githash('repo_1', 1), files=['DEPS']))
+        self.assertTrue(scm.check_diff(self.githash('repo_1', 1)))
+        self.assertTrue(
+            scm.check_diff(self.githash('repo_1', 3), files=['DEPS']))
+
+        self.assertFalse(
+            scm.check_diff(self.githash('repo_1', 2),
+                           files=['DEPS', 'doesnotmatter']))
+        self.assertFalse(scm.check_diff(self.githash('repo_1', 2)))
 
 
 if 'unittest.util' in __import__('sys').modules:
-  # Show full diff in self.assertEqual.
-  __import__('sys').modules['unittest.util']._MAX_LENGTH = 999999999
+    # Show full diff in self.assertEqual.
+    __import__('sys').modules['unittest.util']._MAX_LENGTH = 999999999
 
 if __name__ == '__main__':
-  level = logging.DEBUG if '-v' in sys.argv else logging.FATAL
-  logging.basicConfig(
-      level=level,
-      format='%(asctime).19s %(levelname)s %(filename)s:'
-             '%(lineno)s %(message)s')
-  unittest.main()
+    level = logging.DEBUG if '-v' in sys.argv else logging.FATAL
+    logging.basicConfig(level=level,
+                        format='%(asctime).19s %(levelname)s %(filename)s:'
+                        '%(lineno)s %(message)s')
+    unittest.main()
 
 # vim: ts=2:sw=2:tw=80:et:

+ 160 - 144
tests/gclient_smoketest.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Smoke tests for gclient.py.
 
 Shell out 'gclient' and run basic conformance tests.
@@ -15,158 +14,175 @@ import unittest
 
 import gclient_smoketest_base
 
-
 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 sys.path.insert(0, ROOT_DIR)
 
 import subprocess2
 from testing_support.fake_repos import join, write
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class GClientSmoke(gclient_smoketest_base.GClientSmokeBase):
-  """Doesn't require git-daemon."""
-  @property
-  def git_base(self):
-    return 'git://random.server/git/'
-
-  def testNotConfigured(self):
-    res = ('', 'Error: client not configured; see \'gclient config\'\n', 1)
-    self.check(res, self.gclient(['diff'], error_ok=True))
-    self.check(res, self.gclient(['pack'], error_ok=True))
-    self.check(res, self.gclient(['revert'], error_ok=True))
-    self.check(res, self.gclient(['revinfo'], error_ok=True))
-    self.check(res, self.gclient(['runhooks'], error_ok=True))
-    self.check(res, self.gclient(['status'], error_ok=True))
-    self.check(res, self.gclient(['sync'], error_ok=True))
-    self.check(res, self.gclient(['update'], error_ok=True))
-
-  def testConfig(self):
-    # Get any bootstrapping out of the way.
-    results = self.gclient(['version'])
-
-    p = join(self.root_dir, '.gclient')
-    def test(cmd, expected):
-      if os.path.exists(p):
+    """Doesn't require git-daemon."""
+    @property
+    def git_base(self):
+        return 'git://random.server/git/'
+
+    def testNotConfigured(self):
+        res = ('', 'Error: client not configured; see \'gclient config\'\n', 1)
+        self.check(res, self.gclient(['diff'], error_ok=True))
+        self.check(res, self.gclient(['pack'], error_ok=True))
+        self.check(res, self.gclient(['revert'], error_ok=True))
+        self.check(res, self.gclient(['revinfo'], error_ok=True))
+        self.check(res, self.gclient(['runhooks'], error_ok=True))
+        self.check(res, self.gclient(['status'], error_ok=True))
+        self.check(res, self.gclient(['sync'], error_ok=True))
+        self.check(res, self.gclient(['update'], error_ok=True))
+
+    def testConfig(self):
+        # Get any bootstrapping out of the way.
+        results = self.gclient(['version'])
+
+        p = join(self.root_dir, '.gclient')
+
+        def test(cmd, expected):
+            if os.path.exists(p):
+                os.remove(p)
+            results = self.gclient(cmd)
+            self.check(('', '', 0), results)
+            with open(p, 'r') as f:
+                actual = {}
+                exec(f.read(), {}, actual)
+                self.assertEqual(expected, actual)
+
+        test(
+            ['config', self.git_base + 'src/'], {
+                'solutions': [{
+                    'name': 'src',
+                    'url': self.git_base + 'src',
+                    'deps_file': 'DEPS',
+                    'managed': True,
+                    'custom_deps': {},
+                    'custom_vars': {},
+                }],
+            })
+
+        test(
+            [
+                'config', self.git_base + 'repo_1', '--name', 'src',
+                '--cache-dir', 'none'
+            ], {
+                'solutions': [{
+                    'name': 'src',
+                    'url': self.git_base + 'repo_1',
+                    'deps_file': 'DEPS',
+                    'managed': True,
+                    'custom_deps': {},
+                    'custom_vars': {},
+                }],
+                'cache_dir':
+                None
+            })
+
+        test(
+            [
+                'config', 'https://example.com/foo', 'faa', '--cache-dir',
+                'something'
+            ], {
+                'solutions': [{
+                    'name': 'foo',
+                    'url': 'https://example.com/foo',
+                    'deps_file': 'DEPS',
+                    'managed': True,
+                    'custom_deps': {},
+                    'custom_vars': {},
+                }],
+                'cache_dir':
+                'something'
+            })
+
+        test(
+            ['config', 'https://example.com/foo', '--deps', 'blah'], {
+                'solutions': [{
+                    'name': 'foo',
+                    'url': 'https://example.com/foo',
+                    'deps_file': 'blah',
+                    'managed': True,
+                    'custom_deps': {},
+                    'custom_vars': {},
+                }]
+            })
+
+        test(
+            [
+                'config', self.git_base + 'src/', '--custom-var',
+                'bool_var=True', '--custom-var', 'str_var="abc"'
+            ], {
+                'solutions': [{
+                    'name': 'src',
+                    'url': self.git_base + 'src',
+                    'deps_file': 'DEPS',
+                    'managed': True,
+                    'custom_deps': {},
+                    'custom_vars': {
+                        'bool_var': True,
+                        'str_var': 'abc',
+                    },
+                }]
+            })
+
+        test(['config', '--spec', 'bah = ["blah blah"]'],
+             {'bah': ["blah blah"]})
+
         os.remove(p)
-      results = self.gclient(cmd)
-      self.check(('', '', 0), results)
-      with open(p, 'r') as f:
-        actual = {}
-        exec(f.read(), {}, actual)
-        self.assertEqual(expected, actual)
-
-    test(
-        ['config', self.git_base + 'src/'],
-        {
-            'solutions': [{
-                'name': 'src',
-                'url': self.git_base + 'src',
-                'deps_file': 'DEPS',
-                'managed': True,
-                'custom_deps': {},
-                'custom_vars': {},
-            }],
-        })
-
-    test(['config', self.git_base + 'repo_1',
-          '--name', 'src',
-          '--cache-dir', 'none'],
-         {'solutions': [{
-             'name': 'src',
-             'url': self.git_base + 'repo_1',
-             'deps_file': 'DEPS',
-             'managed': True,
-             'custom_deps': {},
-             'custom_vars': {},
-          }],
-          'cache_dir': None})
-
-    test(['config', 'https://example.com/foo', 'faa',
-          '--cache-dir', 'something'],
-         {'solutions': [{
-             'name': 'foo',
-             'url': 'https://example.com/foo',
-             'deps_file': 'DEPS',
-             'managed': True,
-             'custom_deps': {},
-             'custom_vars': {},
-          }],
-          'cache_dir': 'something'})
-
-    test(['config', 'https://example.com/foo',
-          '--deps', 'blah'],
-         {'solutions': [{
-             'name': 'foo',
-             'url': 'https://example.com/foo',
-             'deps_file': 'blah',
-             'managed': True,
-             'custom_deps': {},
-             'custom_vars': {},
-          }]})
-
-    test(['config', self.git_base + 'src/',
-          '--custom-var', 'bool_var=True',
-          '--custom-var', 'str_var="abc"'],
-         {'solutions': [{
-             'name': 'src',
-             'url': self.git_base + 'src',
-             'deps_file': 'DEPS',
-             'managed': True,
-             'custom_deps': {},
-             'custom_vars': {
-                 'bool_var': True,
-                 'str_var': 'abc',
-             },
-          }]})
-
-    test(['config', '--spec', 'bah = ["blah blah"]'], {'bah': ["blah blah"]})
-
-    os.remove(p)
-    results = self.gclient(['config', 'foo', 'faa', 'fuu'], error_ok=True)
-    err = ('Usage: gclient.py config [options] [url]\n\n'
-           'gclient.py: error: Inconsistent arguments. Use either --spec or one'
-           ' or 2 args\n')
-    self.check(('', err, 2), results)
-    self.assertFalse(os.path.exists(join(self.root_dir, '.gclient')))
-
-  def testSolutionNone(self):
-    results = self.gclient(['config', '--spec',
-                            'solutions=[{"name": "./", "url": None}]'])
-    self.check(('', '', 0), results)
-    results = self.gclient(['sync'])
-    self.check(('', '', 0), results)
-    self.assertTree({})
-    results = self.gclient(['revinfo'])
-    self.check(('./: None\n', '', 0), results)
-    self.check(('', '', 0), self.gclient(['diff']))
-    self.assertTree({})
-    self.check(('', '', 0), self.gclient(['pack']))
-    self.check(('', '', 0), self.gclient(['revert']))
-    self.assertTree({})
-    self.check(('', '', 0), self.gclient(['runhooks']))
-    self.assertTree({})
-    self.check(('', '', 0), self.gclient(['status']))
-
-  def testDifferentTopLevelDirectory(self):
-    # Check that even if the .gclient file does not mention the directory src
-    # itself, but it is included via dependencies, the .gclient file is used.
-    self.gclient(['config', self.git_base + 'src.DEPS'])
-    deps = join(self.root_dir, 'src.DEPS')
-    os.mkdir(deps)
-    subprocess2.check_output(['git', 'init'], cwd=deps)
-    write(join(deps, 'DEPS'),
-        'deps = { "src": "%ssrc" }' % (self.git_base))
-    subprocess2.check_output(['git', 'add', 'DEPS'], cwd=deps)
-    subprocess2.check_output(
-        ['git', 'commit', '-a', '-m', 'DEPS file'], cwd=deps)
-    src = join(self.root_dir, 'src')
-    os.mkdir(src)
-    subprocess2.check_output(['git', 'init'], cwd=src)
-    res = self.gclient(['status', '--jobs', '1', '-v'], src)
-    self.checkBlock(res[0], [('running', deps), ('running', src)])
+        results = self.gclient(['config', 'foo', 'faa', 'fuu'], error_ok=True)
+        err = (
+            'Usage: gclient.py config [options] [url]\n\n'
+            'gclient.py: error: Inconsistent arguments. Use either --spec or one'
+            ' or 2 args\n')
+        self.check(('', err, 2), results)
+        self.assertFalse(os.path.exists(join(self.root_dir, '.gclient')))
+
+    def testSolutionNone(self):
+        results = self.gclient(
+            ['config', '--spec', 'solutions=[{"name": "./", "url": None}]'])
+        self.check(('', '', 0), results)
+        results = self.gclient(['sync'])
+        self.check(('', '', 0), results)
+        self.assertTree({})
+        results = self.gclient(['revinfo'])
+        self.check(('./: None\n', '', 0), results)
+        self.check(('', '', 0), self.gclient(['diff']))
+        self.assertTree({})
+        self.check(('', '', 0), self.gclient(['pack']))
+        self.check(('', '', 0), self.gclient(['revert']))
+        self.assertTree({})
+        self.check(('', '', 0), self.gclient(['runhooks']))
+        self.assertTree({})
+        self.check(('', '', 0), self.gclient(['status']))
+
+    def testDifferentTopLevelDirectory(self):
+        # Check that even if the .gclient file does not mention the directory
+        # src itself, but it is included via dependencies, the .gclient file is
+        # used.
+        self.gclient(['config', self.git_base + 'src.DEPS'])
+        deps = join(self.root_dir, 'src.DEPS')
+        os.mkdir(deps)
+        subprocess2.check_output(['git', 'init'], cwd=deps)
+        write(join(deps, 'DEPS'), 'deps = { "src": "%ssrc" }' % (self.git_base))
+        subprocess2.check_output(['git', 'add', 'DEPS'], cwd=deps)
+        subprocess2.check_output(['git', 'commit', '-a', '-m', 'DEPS file'],
+                                 cwd=deps)
+        src = join(self.root_dir, 'src')
+        os.mkdir(src)
+        subprocess2.check_output(['git', 'init'], cwd=src)
+        res = self.gclient(['status', '--jobs', '1', '-v'], src)
+        self.checkBlock(res[0], [('running', deps), ('running', src)])
 
 
 if __name__ == '__main__':
-  if '-v' in sys.argv:
-    logging.basicConfig(level=logging.DEBUG)
-  unittest.main()
+    if '-v' in sys.argv:
+        logging.basicConfig(level=logging.DEBUG)
+    unittest.main()

+ 126 - 118
tests/gclient_smoketest_base.py

@@ -8,7 +8,6 @@ import os
 import re
 import sys
 
-
 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 GCLIENT_PATH = os.path.join(ROOT_DIR, 'gclient')
 sys.path.insert(0, ROOT_DIR)
@@ -18,68 +17,72 @@ from testing_support import fake_repos
 
 
 class GClientSmokeBase(fake_repos.FakeReposTestBase):
-  def setUp(self):
-    super(GClientSmokeBase, self).setUp()
-    # Make sure it doesn't try to auto update when testing!
-    self.env = os.environ.copy()
-    self.env['DEPOT_TOOLS_UPDATE'] = '0'
-    self.env['DEPOT_TOOLS_METRICS'] = '0'
-    # Suppress Python 3 warnings and other test undesirables.
-    self.env['GCLIENT_TEST'] = '1'
-    self.maxDiff = None
-
-  def gclient(self, cmd, cwd=None, error_ok=False):
-    if not cwd:
-      cwd = self.root_dir
-    cmd = [GCLIENT_PATH] + cmd
-    process = subprocess2.Popen(
-        cmd, cwd=cwd, env=self.env, stdout=subprocess2.PIPE,
-        stderr=subprocess2.PIPE, universal_newlines=True)
-    (stdout, stderr) = process.communicate()
-    logging.debug("XXX: %s\n%s\nXXX" % (' '.join(cmd), stdout))
-    logging.debug("YYY: %s\n%s\nYYY" % (' '.join(cmd), stderr))
-
-    if not error_ok:
-      self.assertEqual(0, process.returncode, stderr)
-
-    return (stdout.replace('\r\n', '\n'), stderr.replace('\r\n', '\n'),
-            process.returncode)
-
-  def untangle(self, stdout):
-    """Separates output based on thread IDs."""
-    tasks = {}
-    remaining = []
-    task_id = 0
-    for line in stdout.splitlines(False):
-      m = re.match(r'^(\d)+>(.*)$', line)
-      if not m:
-        if task_id:
-          # Lines broken with carriage breaks don't have a thread ID, but belong
-          # to the last seen thread ID.
-          tasks.setdefault(task_id, []).append(line)
-        else:
-          remaining.append(line)
-      else:
-        self.assertEqual([], remaining)
-        task_id = int(m.group(1))
-        tasks.setdefault(task_id, []).append(m.group(2))
-    out = []
-    for key in sorted(tasks.keys()):
-      out.extend(tasks[key])
-    out.extend(remaining)
-    return '\n'.join(out)
-
-  def parseGclient(self, cmd, items, expected_stderr='', untangle=False):
-    """Parse gclient's output to make it easier to test.
+    def setUp(self):
+        super(GClientSmokeBase, self).setUp()
+        # Make sure it doesn't try to auto update when testing!
+        self.env = os.environ.copy()
+        self.env['DEPOT_TOOLS_UPDATE'] = '0'
+        self.env['DEPOT_TOOLS_METRICS'] = '0'
+        # Suppress Python 3 warnings and other test undesirables.
+        self.env['GCLIENT_TEST'] = '1'
+        self.maxDiff = None
+
+    def gclient(self, cmd, cwd=None, error_ok=False):
+        if not cwd:
+            cwd = self.root_dir
+        cmd = [GCLIENT_PATH] + cmd
+        process = subprocess2.Popen(cmd,
+                                    cwd=cwd,
+                                    env=self.env,
+                                    stdout=subprocess2.PIPE,
+                                    stderr=subprocess2.PIPE,
+                                    universal_newlines=True)
+        (stdout, stderr) = process.communicate()
+        logging.debug("XXX: %s\n%s\nXXX" % (' '.join(cmd), stdout))
+        logging.debug("YYY: %s\n%s\nYYY" % (' '.join(cmd), stderr))
+
+        if not error_ok:
+            self.assertEqual(0, process.returncode, stderr)
+
+        return (stdout.replace('\r\n',
+                               '\n'), stderr.replace('\r\n',
+                                                     '\n'), process.returncode)
+
+    def untangle(self, stdout):
+        """Separates output based on thread IDs."""
+        tasks = {}
+        remaining = []
+        task_id = 0
+        for line in stdout.splitlines(False):
+            m = re.match(r'^(\d)+>(.*)$', line)
+            if not m:
+                if task_id:
+                    # Lines broken with carriage breaks don't have a thread ID,
+                    # but belong to the last seen thread ID.
+                    tasks.setdefault(task_id, []).append(line)
+                else:
+                    remaining.append(line)
+            else:
+                self.assertEqual([], remaining)
+                task_id = int(m.group(1))
+                tasks.setdefault(task_id, []).append(m.group(2))
+        out = []
+        for key in sorted(tasks.keys()):
+            out.extend(tasks[key])
+        out.extend(remaining)
+        return '\n'.join(out)
+
+    def parseGclient(self, cmd, items, expected_stderr='', untangle=False):
+        """Parse gclient's output to make it easier to test.
     If untangle is True, tries to sort out the output from parallel checkout."""
-    (stdout, stderr, _) = self.gclient(cmd)
-    if untangle:
-      stdout = self.untangle(stdout)
-    self.checkString(expected_stderr, stderr)
-    return self.checkBlock(stdout, items)
-
-  def splitBlock(self, stdout):
-    """Split gclient's output into logical execution blocks.
+        (stdout, stderr, _) = self.gclient(cmd)
+        if untangle:
+            stdout = self.untangle(stdout)
+        self.checkString(expected_stderr, stderr)
+        return self.checkBlock(stdout, items)
+
+    def splitBlock(self, stdout):
+        """Split gclient's output into logical execution blocks.
     ___ running 'foo' at '/bar'
     (...)
     ___ running 'baz' at '/bar'
@@ -87,59 +90,64 @@ class GClientSmokeBase(fake_repos.FakeReposTestBase):
 
     will result in 2 items of len((...).splitlines()) each.
     """
-    results = []
-    for line in stdout.splitlines(False):
-      # Intentionally skips empty lines.
-      if not line:
-        continue
-      if not line.startswith('__'):
-        if results:
-          results[-1].append(line)
-        else:
-          # TODO(maruel): gclient's git stdout is inconsistent.
-          # This should fail the test instead!!
-          pass
-        continue
-
-      match = re.match(r'^________ ([a-z]+) \'(.*)\' in \'(.*)\'$', line)
-      if match:
-        results.append([[match.group(1), match.group(2), match.group(3)]])
-        continue
-
-      match = re.match(r'^_____ (.*) is missing, syncing instead$', line)
-      if match:
-        # Blah, it's when a dependency is deleted, we should probably not
-        # output this message.
-        results.append([line])
-        continue
-
-      # These two regexps are a bit too broad, they are necessary only for git
-      # checkouts.
-      if (re.match(r'_____ [^ ]+ at [^ ]+', line) or
-          re.match(r'_____ [^ ]+ : Attempting rebase onto [0-9a-f]+...', line)):
-        continue
-
-      # Fail for any unrecognized lines that start with '__'.
-      self.fail(line)
-    return results
-
-  def checkBlock(self, stdout, items):
-    results = self.splitBlock(stdout)
-    for i in range(min(len(results), len(items))):
-      if isinstance(items[i], (list, tuple)):
-        verb = items[i][0]
-        path = items[i][1]
-      else:
-        verb = items[i]
-        path = self.root_dir
-      self.checkString(results[i][0][0], verb, (i, results[i][0][0], verb))
-      if sys.platform == 'win32':
-        # Make path lower case since casing can change randomly.
-        self.checkString(
-            results[i][0][2].lower(),
-            path.lower(),
-            (i, results[i][0][2].lower(), path.lower()))
-      else:
-        self.checkString(results[i][0][2], path, (i, results[i][0][2], path))
-    self.assertEqual(len(results), len(items), (stdout, items, len(results)))
-    return results
+        results = []
+        for line in stdout.splitlines(False):
+            # Intentionally skips empty lines.
+            if not line:
+                continue
+            if not line.startswith('__'):
+                if results:
+                    results[-1].append(line)
+                else:
+                    # TODO(maruel): gclient's git stdout is inconsistent.
+                    # This should fail the test instead!!
+                    pass
+                continue
+
+            match = re.match(r'^________ ([a-z]+) \'(.*)\' in \'(.*)\'$', line)
+            if match:
+                results.append(
+                    [[match.group(1),
+                      match.group(2),
+                      match.group(3)]])
+                continue
+
+            match = re.match(r'^_____ (.*) is missing, syncing instead$', line)
+            if match:
+                # Blah, it's when a dependency is deleted, we should probably
+                # not output this message.
+                results.append([line])
+                continue
+
+            # These two regexps are a bit too broad, they are necessary only for
+            # git checkouts.
+            if (re.match(r'_____ [^ ]+ at [^ ]+', line) or re.match(
+                    r'_____ [^ ]+ : Attempting rebase onto [0-9a-f]+...',
+                    line)):
+                continue
+
+            # Fail for any unrecognized lines that start with '__'.
+            self.fail(line)
+        return results
+
+    def checkBlock(self, stdout, items):
+        results = self.splitBlock(stdout)
+        for i in range(min(len(results), len(items))):
+            if isinstance(items[i], (list, tuple)):
+                verb = items[i][0]
+                path = items[i][1]
+            else:
+                verb = items[i]
+                path = self.root_dir
+            self.checkString(results[i][0][0], verb,
+                             (i, results[i][0][0], verb))
+            if sys.platform == 'win32':
+                # Make path lower case since casing can change randomly.
+                self.checkString(results[i][0][2].lower(), path.lower(),
+                                 (i, results[i][0][2].lower(), path.lower()))
+            else:
+                self.checkString(results[i][0][2], path,
+                                 (i, results[i][0][2], path))
+        self.assertEqual(len(results), len(items),
+                         (stdout, items, len(results)))
+        return results

Plik diff jest za duży
+ 469 - 479
tests/gclient_test.py


+ 234 - 200
tests/gclient_transitions_smoketest.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2020 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Smoke tests for gclient.py.
 
 Shell out 'gclient' and simulate the behavior of bisect bots as they transition
@@ -22,221 +21,256 @@ sys.path.insert(0, ROOT_DIR)
 import scm
 from testing_support import fake_repos
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class SkiaDEPSTransitionSmokeTest(gclient_smoketest_base.GClientSmokeBase):
-  """Simulate the behavior of bisect bots as they transition across the Skia
+    """Simulate the behavior of bisect bots as they transition across the Skia
   DEPS change."""
 
-  FAKE_REPOS_CLASS = fake_repos.FakeRepoSkiaDEPS
-
-  def setUp(self):
-    super(SkiaDEPSTransitionSmokeTest, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    if not self.enabled:
-      self.skipTest('git fake repos not available')
-
-  def testSkiaDEPSChangeGit(self):
-    # Create an initial checkout:
-    # - Single checkout at the root.
-    # - Multiple checkouts in a shared subdirectory.
-    self.gclient(['config', '--spec',
-        'solutions=['
-        '{"name": "src",'
-        ' "url": ' + repr(self.git_base )+ '+ "repo_2",'
-        '}]'])
-
-    checkout_path = os.path.join(self.root_dir, 'src')
-    skia = os.path.join(checkout_path, 'third_party', 'skia')
-    skia_gyp = os.path.join(skia, 'gyp')
-    skia_include = os.path.join(skia, 'include')
-    skia_src = os.path.join(skia, 'src')
-
-    gyp_git_url = self.git_base + 'repo_3'
-    include_git_url = self.git_base + 'repo_4'
-    src_git_url = self.git_base + 'repo_5'
-    skia_git_url = self.FAKE_REPOS.git_base + 'repo_1'
-
-    pre_hash = self.githash('repo_2', 1)
-    post_hash = self.githash('repo_2', 2)
-
-    # Initial sync. Verify that we get the expected checkout.
-    res = self.gclient(['sync', '--deps', 'mac', '--revision',
-                        'src@%s' % pre_hash])
-    self.assertEqual(res[2], 0, 'Initial sync failed.')
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia_gyp), gyp_git_url)
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia_include), include_git_url)
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia_src), src_git_url)
-
-    # Verify that the sync succeeds. Verify that we have the  expected merged
-    # checkout.
-    res = self.gclient(['sync', '--deps', 'mac', '--revision',
-                        'src@%s' % post_hash])
-    self.assertEqual(res[2], 0, 'DEPS change sync failed.')
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia), skia_git_url)
-
-    # Sync again. Verify that we still have the expected merged checkout.
-    res = self.gclient(['sync', '--deps', 'mac', '--revision',
-                        'src@%s' % post_hash])
-    self.assertEqual(res[2], 0, 'Subsequent sync failed.')
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia), skia_git_url)
-
-    # Sync back to the original DEPS. Verify that we get the original structure.
-    res = self.gclient(['sync', '--deps', 'mac', '--revision',
-                        'src@%s' % pre_hash])
-    self.assertEqual(res[2], 0, 'Reverse sync failed.')
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia_gyp), gyp_git_url)
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia_include), include_git_url)
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia_src), src_git_url)
-
-    # Sync again. Verify that we still have the original structure.
-    res = self.gclient(['sync', '--deps', 'mac', '--revision',
-                        'src@%s' % pre_hash])
-    self.assertEqual(res[2], 0, 'Subsequent sync #2 failed.')
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia_gyp), gyp_git_url)
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia_include), include_git_url)
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             skia_src), src_git_url)
+    FAKE_REPOS_CLASS = fake_repos.FakeRepoSkiaDEPS
+
+    def setUp(self):
+        super(SkiaDEPSTransitionSmokeTest, self).setUp()
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        if not self.enabled:
+            self.skipTest('git fake repos not available')
+
+    def testSkiaDEPSChangeGit(self):
+        # Create an initial checkout:
+        # - Single checkout at the root.
+        # - Multiple checkouts in a shared subdirectory.
+        self.gclient([
+            'config', '--spec', 'solutions=['
+            '{"name": "src",'
+            ' "url": ' + repr(self.git_base) + '+ "repo_2",'
+            '}]'
+        ])
+
+        checkout_path = os.path.join(self.root_dir, 'src')
+        skia = os.path.join(checkout_path, 'third_party', 'skia')
+        skia_gyp = os.path.join(skia, 'gyp')
+        skia_include = os.path.join(skia, 'include')
+        skia_src = os.path.join(skia, 'src')
+
+        gyp_git_url = self.git_base + 'repo_3'
+        include_git_url = self.git_base + 'repo_4'
+        src_git_url = self.git_base + 'repo_5'
+        skia_git_url = self.FAKE_REPOS.git_base + 'repo_1'
+
+        pre_hash = self.githash('repo_2', 1)
+        post_hash = self.githash('repo_2', 2)
+
+        # Initial sync. Verify that we get the expected checkout.
+        res = self.gclient(
+            ['sync', '--deps', 'mac', '--revision',
+             'src@%s' % pre_hash])
+        self.assertEqual(res[2], 0, 'Initial sync failed.')
+        self.assertEqual(
+            scm.GIT.Capture(['config', 'remote.origin.url'], skia_gyp),
+            gyp_git_url)
+        self.assertEqual(
+            scm.GIT.Capture(['config', 'remote.origin.url'], skia_include),
+            include_git_url)
+        self.assertEqual(
+            scm.GIT.Capture(['config', 'remote.origin.url'], skia_src),
+            src_git_url)
+
+        # Verify that the sync succeeds. Verify that we have the  expected
+        # merged checkout.
+        res = self.gclient(
+            ['sync', '--deps', 'mac', '--revision',
+             'src@%s' % post_hash])
+        self.assertEqual(res[2], 0, 'DEPS change sync failed.')
+        self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'], skia),
+                         skia_git_url)
+
+        # Sync again. Verify that we still have the expected merged checkout.
+        res = self.gclient(
+            ['sync', '--deps', 'mac', '--revision',
+             'src@%s' % post_hash])
+        self.assertEqual(res[2], 0, 'Subsequent sync failed.')
+        self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'], skia),
+                         skia_git_url)
+
+        # Sync back to the original DEPS. Verify that we get the original
+        # structure.
+        res = self.gclient(
+            ['sync', '--deps', 'mac', '--revision',
+             'src@%s' % pre_hash])
+        self.assertEqual(res[2], 0, 'Reverse sync failed.')
+        self.assertEqual(
+            scm.GIT.Capture(['config', 'remote.origin.url'], skia_gyp),
+            gyp_git_url)
+        self.assertEqual(
+            scm.GIT.Capture(['config', 'remote.origin.url'], skia_include),
+            include_git_url)
+        self.assertEqual(
+            scm.GIT.Capture(['config', 'remote.origin.url'], skia_src),
+            src_git_url)
+
+        # Sync again. Verify that we still have the original structure.
+        res = self.gclient(
+            ['sync', '--deps', 'mac', '--revision',
+             'src@%s' % pre_hash])
+        self.assertEqual(res[2], 0, 'Subsequent sync #2 failed.')
+        self.assertEqual(
+            scm.GIT.Capture(['config', 'remote.origin.url'], skia_gyp),
+            gyp_git_url)
+        self.assertEqual(
+            scm.GIT.Capture(['config', 'remote.origin.url'], skia_include),
+            include_git_url)
+        self.assertEqual(
+            scm.GIT.Capture(['config', 'remote.origin.url'], skia_src),
+            src_git_url)
 
 
 class BlinkDEPSTransitionSmokeTest(gclient_smoketest_base.GClientSmokeBase):
-  """Simulate the behavior of bisect bots as they transition across the Blink
+    """Simulate the behavior of bisect bots as they transition across the Blink
   DEPS change."""
 
-  FAKE_REPOS_CLASS = fake_repos.FakeRepoBlinkDEPS
-
-  def setUp(self):
-    super(BlinkDEPSTransitionSmokeTest, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    if not self.enabled:
-      self.skipTest('git fake repos not available')
-    self.checkout_path = os.path.join(self.root_dir, 'src')
-    self.blink = os.path.join(self.checkout_path, 'third_party', 'WebKit')
-    self.blink_git_url = self.FAKE_REPOS.git_base + 'repo_2'
-    self.pre_merge_sha = self.githash('repo_1', 1)
-    self.post_merge_sha = self.githash('repo_1', 2)
-
-  def CheckStatusPreMergePoint(self):
-    self.assertEqual(scm.GIT.Capture(['config', 'remote.origin.url'],
-                                             self.blink), self.blink_git_url)
-    self.assertTrue(os.path.exists(join(self.blink, '.git')))
-    self.assertTrue(os.path.exists(join(self.blink, 'OWNERS')))
-    with open(join(self.blink, 'OWNERS')) as f:
-      owners_content = f.read()
-      self.assertEqual('OWNERS-pre', owners_content, 'OWNERS not updated')
-    self.assertTrue(os.path.exists(join(self.blink, 'Source', 'exists_always')))
-    self.assertTrue(os.path.exists(
-        join(self.blink, 'Source', 'exists_before_but_not_after')))
-    self.assertFalse(os.path.exists(
-        join(self.blink, 'Source', 'exists_after_but_not_before')))
-
-  def CheckStatusPostMergePoint(self):
-    # Check that the contents still exists
-    self.assertTrue(os.path.exists(join(self.blink, 'OWNERS')))
-    with open(join(self.blink, 'OWNERS')) as f:
-      owners_content = f.read()
-      self.assertEqual('OWNERS-post', owners_content, 'OWNERS not updated')
-    self.assertTrue(os.path.exists(join(self.blink, 'Source', 'exists_always')))
-    # Check that file removed between the branch point are actually deleted.
-    self.assertTrue(os.path.exists(
-        join(self.blink, 'Source', 'exists_after_but_not_before')))
-    self.assertFalse(os.path.exists(
-        join(self.blink, 'Source', 'exists_before_but_not_after')))
-    # But not the .git folder
-    self.assertFalse(os.path.exists(join(self.blink, '.git')))
-
-  @unittest.skip('flaky')
-  def testBlinkDEPSChangeUsingGclient(self):
-    """Checks that {src,blink} repos are consistent when syncing going back and
+    FAKE_REPOS_CLASS = fake_repos.FakeRepoBlinkDEPS
+
+    def setUp(self):
+        super(BlinkDEPSTransitionSmokeTest, self).setUp()
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        if not self.enabled:
+            self.skipTest('git fake repos not available')
+        self.checkout_path = os.path.join(self.root_dir, 'src')
+        self.blink = os.path.join(self.checkout_path, 'third_party', 'WebKit')
+        self.blink_git_url = self.FAKE_REPOS.git_base + 'repo_2'
+        self.pre_merge_sha = self.githash('repo_1', 1)
+        self.post_merge_sha = self.githash('repo_1', 2)
+
+    def CheckStatusPreMergePoint(self):
+        self.assertEqual(
+            scm.GIT.Capture(['config', 'remote.origin.url'], self.blink),
+            self.blink_git_url)
+        self.assertTrue(os.path.exists(join(self.blink, '.git')))
+        self.assertTrue(os.path.exists(join(self.blink, 'OWNERS')))
+        with open(join(self.blink, 'OWNERS')) as f:
+            owners_content = f.read()
+            self.assertEqual('OWNERS-pre', owners_content, 'OWNERS not updated')
+        self.assertTrue(
+            os.path.exists(join(self.blink, 'Source', 'exists_always')))
+        self.assertTrue(
+            os.path.exists(
+                join(self.blink, 'Source', 'exists_before_but_not_after')))
+        self.assertFalse(
+            os.path.exists(
+                join(self.blink, 'Source', 'exists_after_but_not_before')))
+
+    def CheckStatusPostMergePoint(self):
+        # Check that the contents still exists
+        self.assertTrue(os.path.exists(join(self.blink, 'OWNERS')))
+        with open(join(self.blink, 'OWNERS')) as f:
+            owners_content = f.read()
+            self.assertEqual('OWNERS-post', owners_content,
+                             'OWNERS not updated')
+        self.assertTrue(
+            os.path.exists(join(self.blink, 'Source', 'exists_always')))
+        # Check that file removed between the branch point are actually deleted.
+        self.assertTrue(
+            os.path.exists(
+                join(self.blink, 'Source', 'exists_after_but_not_before')))
+        self.assertFalse(
+            os.path.exists(
+                join(self.blink, 'Source', 'exists_before_but_not_after')))
+        # But not the .git folder
+        self.assertFalse(os.path.exists(join(self.blink, '.git')))
+
+    @unittest.skip('flaky')
+    def testBlinkDEPSChangeUsingGclient(self):
+        """Checks that {src,blink} repos are consistent when syncing going back and
     forth using gclient sync src@revision."""
-    self.gclient(['config', '--spec',
-        'solutions=['
-        '{"name": "src",'
-        ' "url": "' + self.git_base + 'repo_1",'
-        '}]'])
-
-    # Go back and forth two times.
-    for _ in range(2):
-      res = self.gclient(['sync', '--jobs', '1',
-                          '--revision', 'src@%s' % self.pre_merge_sha])
-      self.assertEqual(res[2], 0, 'DEPS change sync failed.')
-      self.CheckStatusPreMergePoint()
-
-      res = self.gclient(['sync', '--jobs', '1',
-                          '--revision', 'src@%s' % self.post_merge_sha])
-      self.assertEqual(res[2], 0, 'DEPS change sync failed.')
-      self.CheckStatusPostMergePoint()
-
-
-  @unittest.skip('flaky')
-  def testBlinkDEPSChangeUsingGit(self):
-    """Like testBlinkDEPSChangeUsingGclient, but move the main project using
+        self.gclient([
+            'config', '--spec', 'solutions=['
+            '{"name": "src",'
+            ' "url": "' + self.git_base + 'repo_1",'
+            '}]'
+        ])
+
+        # Go back and forth two times.
+        for _ in range(2):
+            res = self.gclient([
+                'sync', '--jobs', '1', '--revision',
+                'src@%s' % self.pre_merge_sha
+            ])
+            self.assertEqual(res[2], 0, 'DEPS change sync failed.')
+            self.CheckStatusPreMergePoint()
+
+            res = self.gclient([
+                'sync', '--jobs', '1', '--revision',
+                'src@%s' % self.post_merge_sha
+            ])
+            self.assertEqual(res[2], 0, 'DEPS change sync failed.')
+            self.CheckStatusPostMergePoint()
+
+    @unittest.skip('flaky')
+    def testBlinkDEPSChangeUsingGit(self):
+        """Like testBlinkDEPSChangeUsingGclient, but move the main project using
     directly git and not gclient sync."""
-    self.gclient(['config', '--spec',
-        'solutions=['
-        '{"name": "src",'
-        ' "url": "' + self.git_base + 'repo_1",'
-        ' "managed": False,'
-        '}]'])
-
-    # Perform an initial sync to bootstrap the repo.
-    res = self.gclient(['sync', '--jobs', '1'])
-    self.assertEqual(res[2], 0, 'Initial gclient sync failed.')
-
-    # Go back and forth two times.
-    for _ in range(2):
-      subprocess2.check_call(['git', 'checkout', '-q', self.pre_merge_sha],
-                             cwd=self.checkout_path)
-      res = self.gclient(['sync', '--jobs', '1'])
-      self.assertEqual(res[2], 0, 'gclient sync failed.')
-      self.CheckStatusPreMergePoint()
-
-      subprocess2.check_call(['git', 'checkout', '-q', self.post_merge_sha],
-                             cwd=self.checkout_path)
-      res = self.gclient(['sync', '--jobs', '1'])
-      self.assertEqual(res[2], 0, 'DEPS change sync failed.')
-      self.CheckStatusPostMergePoint()
-
-
-  @unittest.skip('flaky')
-  def testBlinkLocalBranchesArePreserved(self):
-    """Checks that the state of local git branches are effectively preserved
+        self.gclient([
+            'config', '--spec', 'solutions=['
+            '{"name": "src",'
+            ' "url": "' + self.git_base + 'repo_1",'
+            ' "managed": False,'
+            '}]'
+        ])
+
+        # Perform an initial sync to bootstrap the repo.
+        res = self.gclient(['sync', '--jobs', '1'])
+        self.assertEqual(res[2], 0, 'Initial gclient sync failed.')
+
+        # Go back and forth two times.
+        for _ in range(2):
+            subprocess2.check_call(
+                ['git', 'checkout', '-q', self.pre_merge_sha],
+                cwd=self.checkout_path)
+            res = self.gclient(['sync', '--jobs', '1'])
+            self.assertEqual(res[2], 0, 'gclient sync failed.')
+            self.CheckStatusPreMergePoint()
+
+            subprocess2.check_call(
+                ['git', 'checkout', '-q', self.post_merge_sha],
+                cwd=self.checkout_path)
+            res = self.gclient(['sync', '--jobs', '1'])
+            self.assertEqual(res[2], 0, 'DEPS change sync failed.')
+            self.CheckStatusPostMergePoint()
+
+    @unittest.skip('flaky')
+    def testBlinkLocalBranchesArePreserved(self):
+        """Checks that the state of local git branches are effectively preserved
     when going back and forth."""
-    self.gclient(['config', '--spec',
-        'solutions=['
-        '{"name": "src",'
-        ' "url": "' + self.git_base + 'repo_1",'
-        '}]'])
+        self.gclient([
+            'config', '--spec', 'solutions=['
+            '{"name": "src",'
+            ' "url": "' + self.git_base + 'repo_1",'
+            '}]'
+        ])
 
-    # Initialize to pre-merge point.
-    self.gclient(['sync', '--revision', 'src@%s' % self.pre_merge_sha])
-    self.CheckStatusPreMergePoint()
+        # Initialize to pre-merge point.
+        self.gclient(['sync', '--revision', 'src@%s' % self.pre_merge_sha])
+        self.CheckStatusPreMergePoint()
 
-    # Create a branch named "foo".
-    subprocess2.check_call(['git', 'checkout', '-qB', 'foo'],
-                           cwd=self.blink)
+        # Create a branch named "foo".
+        subprocess2.check_call(['git', 'checkout', '-qB', 'foo'],
+                               cwd=self.blink)
 
-    # Cross the pre-merge point.
-    self.gclient(['sync', '--revision', 'src@%s' % self.post_merge_sha])
-    self.CheckStatusPostMergePoint()
+        # Cross the pre-merge point.
+        self.gclient(['sync', '--revision', 'src@%s' % self.post_merge_sha])
+        self.CheckStatusPostMergePoint()
 
-    # Go backwards and check that we still have the foo branch.
-    self.gclient(['sync', '--revision', 'src@%s' % self.pre_merge_sha])
-    self.CheckStatusPreMergePoint()
-    subprocess2.check_call(
-        ['git', 'show-ref', '-q', '--verify', 'refs/heads/foo'], cwd=self.blink)
+        # Go backwards and check that we still have the foo branch.
+        self.gclient(['sync', '--revision', 'src@%s' % self.pre_merge_sha])
+        self.CheckStatusPreMergePoint()
+        subprocess2.check_call(
+            ['git', 'show-ref', '-q', '--verify', 'refs/heads/foo'],
+            cwd=self.blink)
 
 
 if __name__ == '__main__':
-  if '-v' in sys.argv:
-    logging.basicConfig(level=logging.DEBUG)
-  unittest.main()
+    if '-v' in sys.argv:
+        logging.basicConfig(level=logging.DEBUG)
+    unittest.main()

+ 367 - 337
tests/gclient_utils_test.py

@@ -10,369 +10,399 @@ import sys
 import unittest
 from unittest import mock
 
-
 sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 
 import gclient_utils
 import subprocess2
 from testing_support import trial_dir
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class CheckCallAndFilterTestCase(unittest.TestCase):
-  class ProcessIdMock(object):
-    def __init__(self, test_string, return_code=0):
-      self.stdout = test_string.encode('utf-8')
-      self.pid = 9284
-      self.return_code = return_code
-
-    def wait(self):
-      return self.return_code
-
-  def PopenMock(self, *args, **kwargs):
-    kid = self.kids.pop(0)
-    stdout = kwargs.get('stdout')
-    os.write(stdout, kid.stdout)
-    return kid
-
-  def setUp(self):
-    super(CheckCallAndFilterTestCase, self).setUp()
-    self.printfn = io.StringIO()
-    self.stdout = io.BytesIO()
-    self.kids = []
-    mock.patch('sys.stdout', mock.Mock()).start()
-    mock.patch('sys.stdout.buffer', self.stdout).start()
-    mock.patch('sys.stdout.isatty', return_value=False).start()
-    mock.patch('builtins.print', self.printfn.write).start()
-    mock.patch('sys.stdout.flush', lambda: None).start()
-    self.addCleanup(mock.patch.stopall)
-
-  @mock.patch('subprocess2.Popen')
-  def testCheckCallAndFilter(self, mockPopen):
-    cwd = 'bleh'
-    args = ['boo', 'foo', 'bar']
-    test_string = 'ahah\naccb\nallo\naddb\n✔'
-
-    self.kids = [self.ProcessIdMock(test_string)]
-    mockPopen.side_effect = self.PopenMock
-
-    line_list = []
-    result = gclient_utils.CheckCallAndFilter(
-        args, cwd=cwd, show_header=True, always_show_header=True,
-        filter_fn=line_list.append)
-
-    self.assertEqual(result, test_string.encode('utf-8'))
-    self.assertEqual(line_list, [
-        '________ running \'boo foo bar\' in \'bleh\'\n',
-        'ahah',
-        'accb',
-        'allo',
-        'addb',
-        '✔'])
-    self.assertEqual(self.stdout.getvalue(), b'')
-
-    mockPopen.assert_called_with(
-        args, cwd=cwd, stdout=mock.ANY, stderr=subprocess2.STDOUT,
-        bufsize=0)
-
-  @mock.patch('time.sleep')
-  @mock.patch('subprocess2.Popen')
-  def testCheckCallAndFilter_RetryOnce(self, mockPopen, mockTime):
-    cwd = 'bleh'
-    args = ['boo', 'foo', 'bar']
-    test_string = 'ahah\naccb\nallo\naddb\n✔'
-
-    self.kids = [
-        self.ProcessIdMock(test_string, 1),
-        self.ProcessIdMock(test_string, 0)
-    ]
-    mockPopen.side_effect = self.PopenMock
-
-    line_list = []
-    result = gclient_utils.CheckCallAndFilter(
-        args, cwd=cwd, show_header=True, always_show_header=True,
-        filter_fn=line_list.append, retry=True)
-
-    self.assertEqual(result, test_string.encode('utf-8'))
-
-    self.assertEqual(line_list, [
-        '________ running \'boo foo bar\' in \'bleh\'\n',
-        'ahah',
-        'accb',
-        'allo',
-        'addb',
-        '✔',
-        '________ running \'boo foo bar\' in \'bleh\' attempt 2 / 2\n',
-        'ahah',
-        'accb',
-        'allo',
-        'addb',
-        '✔',
-    ])
-
-    mockTime.assert_called_with(gclient_utils.RETRY_INITIAL_SLEEP)
-
-    self.assertEqual(
-        mockPopen.mock_calls,
-        [
-            mock.call(
-                args, cwd=cwd, stdout=mock.ANY,
-                stderr=subprocess2.STDOUT, bufsize=0),
-            mock.call(
-                args, cwd=cwd, stdout=mock.ANY,
-                stderr=subprocess2.STDOUT, bufsize=0),
+    class ProcessIdMock(object):
+        def __init__(self, test_string, return_code=0):
+            self.stdout = test_string.encode('utf-8')
+            self.pid = 9284
+            self.return_code = return_code
+
+        def wait(self):
+            return self.return_code
+
+    def PopenMock(self, *args, **kwargs):
+        kid = self.kids.pop(0)
+        stdout = kwargs.get('stdout')
+        os.write(stdout, kid.stdout)
+        return kid
+
+    def setUp(self):
+        super(CheckCallAndFilterTestCase, self).setUp()
+        self.printfn = io.StringIO()
+        self.stdout = io.BytesIO()
+        self.kids = []
+        mock.patch('sys.stdout', mock.Mock()).start()
+        mock.patch('sys.stdout.buffer', self.stdout).start()
+        mock.patch('sys.stdout.isatty', return_value=False).start()
+        mock.patch('builtins.print', self.printfn.write).start()
+        mock.patch('sys.stdout.flush', lambda: None).start()
+        self.addCleanup(mock.patch.stopall)
+
+    @mock.patch('subprocess2.Popen')
+    def testCheckCallAndFilter(self, mockPopen):
+        cwd = 'bleh'
+        args = ['boo', 'foo', 'bar']
+        test_string = 'ahah\naccb\nallo\naddb\n✔'
+
+        self.kids = [self.ProcessIdMock(test_string)]
+        mockPopen.side_effect = self.PopenMock
+
+        line_list = []
+        result = gclient_utils.CheckCallAndFilter(args,
+                                                  cwd=cwd,
+                                                  show_header=True,
+                                                  always_show_header=True,
+                                                  filter_fn=line_list.append)
+
+        self.assertEqual(result, test_string.encode('utf-8'))
+        self.assertEqual(line_list, [
+            '________ running \'boo foo bar\' in \'bleh\'\n', 'ahah', 'accb',
+            'allo', 'addb', '✔'
+        ])
+        self.assertEqual(self.stdout.getvalue(), b'')
+
+        mockPopen.assert_called_with(args,
+                                     cwd=cwd,
+                                     stdout=mock.ANY,
+                                     stderr=subprocess2.STDOUT,
+                                     bufsize=0)
+
+    @mock.patch('time.sleep')
+    @mock.patch('subprocess2.Popen')
+    def testCheckCallAndFilter_RetryOnce(self, mockPopen, mockTime):
+        cwd = 'bleh'
+        args = ['boo', 'foo', 'bar']
+        test_string = 'ahah\naccb\nallo\naddb\n✔'
+
+        self.kids = [
+            self.ProcessIdMock(test_string, 1),
+            self.ProcessIdMock(test_string, 0)
+        ]
+        mockPopen.side_effect = self.PopenMock
+
+        line_list = []
+        result = gclient_utils.CheckCallAndFilter(args,
+                                                  cwd=cwd,
+                                                  show_header=True,
+                                                  always_show_header=True,
+                                                  filter_fn=line_list.append,
+                                                  retry=True)
+
+        self.assertEqual(result, test_string.encode('utf-8'))
+
+        self.assertEqual(line_list, [
+            '________ running \'boo foo bar\' in \'bleh\'\n',
+            'ahah',
+            'accb',
+            'allo',
+            'addb',
+            '✔',
+            '________ running \'boo foo bar\' in \'bleh\' attempt 2 / 2\n',
+            'ahah',
+            'accb',
+            'allo',
+            'addb',
+            '✔',
         ])
 
-    self.assertEqual(self.stdout.getvalue(), b'')
-    self.assertEqual(
-        self.printfn.getvalue(),
-        'WARNING: subprocess \'"boo" "foo" "bar"\' in bleh failed; will retry '
-        'after a short nap...')
-
-  @mock.patch('subprocess2.Popen')
-  def testCheckCallAndFilter_PrintStdout(self, mockPopen):
-    cwd = 'bleh'
-    args = ['boo', 'foo', 'bar']
-    test_string = 'ahah\naccb\nallo\naddb\n✔'
-
-    self.kids = [self.ProcessIdMock(test_string)]
-    mockPopen.side_effect = self.PopenMock
-
-    result = gclient_utils.CheckCallAndFilter(
-        args, cwd=cwd, show_header=True, always_show_header=True,
-        print_stdout=True)
+        mockTime.assert_called_with(gclient_utils.RETRY_INITIAL_SLEEP)
+
+        self.assertEqual(mockPopen.mock_calls, [
+            mock.call(args,
+                      cwd=cwd,
+                      stdout=mock.ANY,
+                      stderr=subprocess2.STDOUT,
+                      bufsize=0),
+            mock.call(args,
+                      cwd=cwd,
+                      stdout=mock.ANY,
+                      stderr=subprocess2.STDOUT,
+                      bufsize=0),
+        ])
 
-    self.assertEqual(result, test_string.encode('utf-8'))
-    self.assertEqual(self.stdout.getvalue().splitlines(), [
-        b"________ running 'boo foo bar' in 'bleh'",
-        b'ahah',
-        b'accb',
-        b'allo',
-        b'addb',
-        b'\xe2\x9c\x94',
-    ])
+        self.assertEqual(self.stdout.getvalue(), b'')
+        self.assertEqual(
+            self.printfn.getvalue(),
+            'WARNING: subprocess \'"boo" "foo" "bar"\' in bleh failed; will retry '
+            'after a short nap...')
+
+    @mock.patch('subprocess2.Popen')
+    def testCheckCallAndFilter_PrintStdout(self, mockPopen):
+        cwd = 'bleh'
+        args = ['boo', 'foo', 'bar']
+        test_string = 'ahah\naccb\nallo\naddb\n✔'
+
+        self.kids = [self.ProcessIdMock(test_string)]
+        mockPopen.side_effect = self.PopenMock
+
+        result = gclient_utils.CheckCallAndFilter(args,
+                                                  cwd=cwd,
+                                                  show_header=True,
+                                                  always_show_header=True,
+                                                  print_stdout=True)
+
+        self.assertEqual(result, test_string.encode('utf-8'))
+        self.assertEqual(self.stdout.getvalue().splitlines(), [
+            b"________ running 'boo foo bar' in 'bleh'",
+            b'ahah',
+            b'accb',
+            b'allo',
+            b'addb',
+            b'\xe2\x9c\x94',
+        ])
 
 
 class AnnotatedTestCase(unittest.TestCase):
-  def setUp(self):
-    self.out = gclient_utils.MakeFileAnnotated(io.BytesIO())
-    self.annotated = gclient_utils.MakeFileAnnotated(
-        io.BytesIO(), include_zero=True)
-
-  def testWrite(self):
-    test_cases = [
-        ('test string\n', b'test string\n'),
-        (b'test string\n', b'test string\n'),
-        ('✔\n', b'\xe2\x9c\x94\n'),
-        (b'\xe2\x9c\x94\n', b'\xe2\x9c\x94\n'),
-        ('first line\nsecondline\n', b'first line\nsecondline\n'),
-        (b'first line\nsecondline\n', b'first line\nsecondline\n'),
-    ]
-
-    for test_input, expected_output in test_cases:
-      out = gclient_utils.MakeFileAnnotated(io.BytesIO())
-      out.write(test_input)
-      self.assertEqual(out.getvalue(), expected_output)
-
-  def testWrite_Annotated(self):
-    test_cases = [
-        ('test string\n', b'0>test string\n'),
-        (b'test string\n', b'0>test string\n'),
-        ('✔\n', b'0>\xe2\x9c\x94\n'),
-        (b'\xe2\x9c\x94\n', b'0>\xe2\x9c\x94\n'),
-        ('first line\nsecondline\n', b'0>first line\n0>secondline\n'),
-        (b'first line\nsecondline\n', b'0>first line\n0>secondline\n'),
-    ]
-
-    for test_input, expected_output in test_cases:
-      out = gclient_utils.MakeFileAnnotated(io.BytesIO(), include_zero=True)
-      out.write(test_input)
-      self.assertEqual(out.getvalue(), expected_output)
-
-  def testByteByByteInput(self):
-    self.out.write(b'\xe2')
-    self.out.write(b'\x9c')
-    self.out.write(b'\x94')
-    self.out.write(b'\n')
-    self.out.write(b'\xe2')
-    self.out.write(b'\n')
-    self.assertEqual(self.out.getvalue(), b'\xe2\x9c\x94\n\xe2\n')
-
-  def testByteByByteInput_Annotated(self):
-    self.annotated.write(b'\xe2')
-    self.annotated.write(b'\x9c')
-    self.annotated.write(b'\x94')
-    self.annotated.write(b'\n')
-    self.annotated.write(b'\xe2')
-    self.annotated.write(b'\n')
-    self.assertEqual(self.annotated.getvalue(), b'0>\xe2\x9c\x94\n0>\xe2\n')
-
-  def testFlush_Annotated(self):
-    self.annotated.write(b'first line\nsecond line')
-    self.assertEqual(self.annotated.getvalue(), b'0>first line\n')
-    self.annotated.flush()
-    self.assertEqual(
-        self.annotated.getvalue(), b'0>first line\n0>second line\n')
+    def setUp(self):
+        self.out = gclient_utils.MakeFileAnnotated(io.BytesIO())
+        self.annotated = gclient_utils.MakeFileAnnotated(io.BytesIO(),
+                                                         include_zero=True)
+
+    def testWrite(self):
+        test_cases = [
+            ('test string\n', b'test string\n'),
+            (b'test string\n', b'test string\n'),
+            ('✔\n', b'\xe2\x9c\x94\n'),
+            (b'\xe2\x9c\x94\n', b'\xe2\x9c\x94\n'),
+            ('first line\nsecondline\n', b'first line\nsecondline\n'),
+            (b'first line\nsecondline\n', b'first line\nsecondline\n'),
+        ]
+
+        for test_input, expected_output in test_cases:
+            out = gclient_utils.MakeFileAnnotated(io.BytesIO())
+            out.write(test_input)
+            self.assertEqual(out.getvalue(), expected_output)
+
+    def testWrite_Annotated(self):
+        test_cases = [
+            ('test string\n', b'0>test string\n'),
+            (b'test string\n', b'0>test string\n'),
+            ('✔\n', b'0>\xe2\x9c\x94\n'),
+            (b'\xe2\x9c\x94\n', b'0>\xe2\x9c\x94\n'),
+            ('first line\nsecondline\n', b'0>first line\n0>secondline\n'),
+            (b'first line\nsecondline\n', b'0>first line\n0>secondline\n'),
+        ]
+
+        for test_input, expected_output in test_cases:
+            out = gclient_utils.MakeFileAnnotated(io.BytesIO(),
+                                                  include_zero=True)
+            out.write(test_input)
+            self.assertEqual(out.getvalue(), expected_output)
+
+    def testByteByByteInput(self):
+        self.out.write(b'\xe2')
+        self.out.write(b'\x9c')
+        self.out.write(b'\x94')
+        self.out.write(b'\n')
+        self.out.write(b'\xe2')
+        self.out.write(b'\n')
+        self.assertEqual(self.out.getvalue(), b'\xe2\x9c\x94\n\xe2\n')
+
+    def testByteByByteInput_Annotated(self):
+        self.annotated.write(b'\xe2')
+        self.annotated.write(b'\x9c')
+        self.annotated.write(b'\x94')
+        self.annotated.write(b'\n')
+        self.annotated.write(b'\xe2')
+        self.annotated.write(b'\n')
+        self.assertEqual(self.annotated.getvalue(), b'0>\xe2\x9c\x94\n0>\xe2\n')
+
+    def testFlush_Annotated(self):
+        self.annotated.write(b'first line\nsecond line')
+        self.assertEqual(self.annotated.getvalue(), b'0>first line\n')
+        self.annotated.flush()
+        self.assertEqual(self.annotated.getvalue(),
+                         b'0>first line\n0>second line\n')
 
 
 class SplitUrlRevisionTestCase(unittest.TestCase):
-  def testSSHUrl(self):
-    url = "ssh://test@example.com/test.git"
-    rev = "ac345e52dc"
-    out_url, out_rev = gclient_utils.SplitUrlRevision(url)
-    self.assertEqual(out_rev, None)
-    self.assertEqual(out_url, url)
-    out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
-    self.assertEqual(out_rev, rev)
-    self.assertEqual(out_url, url)
-    url = "ssh://example.com/test.git"
-    out_url, out_rev = gclient_utils.SplitUrlRevision(url)
-    self.assertEqual(out_rev, None)
-    self.assertEqual(out_url, url)
-    out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
-    self.assertEqual(out_rev, rev)
-    self.assertEqual(out_url, url)
-    url = "ssh://example.com/git/test.git"
-    out_url, out_rev = gclient_utils.SplitUrlRevision(url)
-    self.assertEqual(out_rev, None)
-    self.assertEqual(out_url, url)
-    out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
-    self.assertEqual(out_rev, rev)
-    self.assertEqual(out_url, url)
-    rev = "test-stable"
-    out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
-    self.assertEqual(out_rev, rev)
-    self.assertEqual(out_url, url)
-    url = "ssh://user-name@example.com/~/test.git"
-    out_url, out_rev = gclient_utils.SplitUrlRevision(url)
-    self.assertEqual(out_rev, None)
-    self.assertEqual(out_url, url)
-    out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
-    self.assertEqual(out_rev, rev)
-    self.assertEqual(out_url, url)
-    url = "ssh://user-name@example.com/~username/test.git"
-    out_url, out_rev = gclient_utils.SplitUrlRevision(url)
-    self.assertEqual(out_rev, None)
-    self.assertEqual(out_url, url)
-    out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
-    self.assertEqual(out_rev, rev)
-    self.assertEqual(out_url, url)
-    url = "git@github.com:dart-lang/spark.git"
-    out_url, out_rev = gclient_utils.SplitUrlRevision(url)
-    self.assertEqual(out_rev, None)
-    self.assertEqual(out_url, url)
-    out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
-    self.assertEqual(out_rev, rev)
-    self.assertEqual(out_url, url)
-
-  def testSVNUrl(self):
-    url = "svn://example.com/test"
-    rev = "ac345e52dc"
-    out_url, out_rev = gclient_utils.SplitUrlRevision(url)
-    self.assertEqual(out_rev, None)
-    self.assertEqual(out_url, url)
-    out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
-    self.assertEqual(out_rev, rev)
-    self.assertEqual(out_url, url)
+    def testSSHUrl(self):
+        url = "ssh://test@example.com/test.git"
+        rev = "ac345e52dc"
+        out_url, out_rev = gclient_utils.SplitUrlRevision(url)
+        self.assertEqual(out_rev, None)
+        self.assertEqual(out_url, url)
+        out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
+        self.assertEqual(out_rev, rev)
+        self.assertEqual(out_url, url)
+        url = "ssh://example.com/test.git"
+        out_url, out_rev = gclient_utils.SplitUrlRevision(url)
+        self.assertEqual(out_rev, None)
+        self.assertEqual(out_url, url)
+        out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
+        self.assertEqual(out_rev, rev)
+        self.assertEqual(out_url, url)
+        url = "ssh://example.com/git/test.git"
+        out_url, out_rev = gclient_utils.SplitUrlRevision(url)
+        self.assertEqual(out_rev, None)
+        self.assertEqual(out_url, url)
+        out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
+        self.assertEqual(out_rev, rev)
+        self.assertEqual(out_url, url)
+        rev = "test-stable"
+        out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
+        self.assertEqual(out_rev, rev)
+        self.assertEqual(out_url, url)
+        url = "ssh://user-name@example.com/~/test.git"
+        out_url, out_rev = gclient_utils.SplitUrlRevision(url)
+        self.assertEqual(out_rev, None)
+        self.assertEqual(out_url, url)
+        out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
+        self.assertEqual(out_rev, rev)
+        self.assertEqual(out_url, url)
+        url = "ssh://user-name@example.com/~username/test.git"
+        out_url, out_rev = gclient_utils.SplitUrlRevision(url)
+        self.assertEqual(out_rev, None)
+        self.assertEqual(out_url, url)
+        out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
+        self.assertEqual(out_rev, rev)
+        self.assertEqual(out_url, url)
+        url = "git@github.com:dart-lang/spark.git"
+        out_url, out_rev = gclient_utils.SplitUrlRevision(url)
+        self.assertEqual(out_rev, None)
+        self.assertEqual(out_url, url)
+        out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
+        self.assertEqual(out_rev, rev)
+        self.assertEqual(out_url, url)
+
+    def testSVNUrl(self):
+        url = "svn://example.com/test"
+        rev = "ac345e52dc"
+        out_url, out_rev = gclient_utils.SplitUrlRevision(url)
+        self.assertEqual(out_rev, None)
+        self.assertEqual(out_url, url)
+        out_url, out_rev = gclient_utils.SplitUrlRevision("%s@%s" % (url, rev))
+        self.assertEqual(out_rev, rev)
+        self.assertEqual(out_url, url)
 
 
 class ExtracRefNameTest(unittest.TestCase):
-  def testMatchFound(self):
-    self.assertEqual(
-        'main', gclient_utils.ExtractRefName('origin',
-                                             'refs/remote/origin/main'))
-    self.assertEqual('1234',
-                     gclient_utils.ExtractRefName('origin', 'refs/tags/1234'))
-    self.assertEqual(
-        'chicken', gclient_utils.ExtractRefName('origin', 'refs/heads/chicken'))
+    def testMatchFound(self):
+        self.assertEqual(
+            'main',
+            gclient_utils.ExtractRefName('origin', 'refs/remote/origin/main'))
+        self.assertEqual(
+            '1234', gclient_utils.ExtractRefName('origin', 'refs/tags/1234'))
+        self.assertEqual(
+            'chicken',
+            gclient_utils.ExtractRefName('origin', 'refs/heads/chicken'))
 
-  def testNoMatch(self):
-    self.assertIsNone(gclient_utils.ExtractRefName('origin', 'abcbbb1234'))
+    def testNoMatch(self):
+        self.assertIsNone(gclient_utils.ExtractRefName('origin', 'abcbbb1234'))
 
 
 class GClientUtilsTest(trial_dir.TestCase):
-  def testHardToDelete(self):
-    # Use the fact that tearDown will delete the directory to make it hard to do
-    # so.
-    l1 = os.path.join(self.root_dir, 'l1')
-    l2 = os.path.join(l1, 'l2')
-    l3 = os.path.join(l2, 'l3')
-    f3 = os.path.join(l3, 'f3')
-    os.mkdir(l1)
-    os.mkdir(l2)
-    os.mkdir(l3)
-    gclient_utils.FileWrite(f3, 'foo')
-    os.chmod(f3, 0)
-    os.chmod(l3, 0)
-    os.chmod(l2, 0)
-    os.chmod(l1, 0)
-
-  def testUpgradeToHttps(self):
-    values = [
-        ['', ''],
-        [None, None],
-        ['foo', 'https://foo'],
-        ['http://foo', 'https://foo'],
-        ['foo/', 'https://foo/'],
-        ['ssh-svn://foo', 'ssh-svn://foo'],
-        ['ssh-svn://foo/bar/', 'ssh-svn://foo/bar/'],
-        ['codereview.chromium.org', 'https://codereview.chromium.org'],
-        ['codereview.chromium.org/', 'https://codereview.chromium.org/'],
-        [
-          'chromium-review.googlesource.com',
-          'https://chromium-review.googlesource.com'
-        ],
-        [
-          'chromium-review.googlesource.com/',
-          'https://chromium-review.googlesource.com/'
-        ],
-        ['http://foo:10000', 'http://foo:10000'],
-        ['http://foo:10000/bar', 'http://foo:10000/bar'],
-        ['foo:10000', 'http://foo:10000'],
-        ['foo:', 'https://foo:'],
-    ]
-    for content, expected in values:
-      self.assertEqual(
-          expected, gclient_utils.UpgradeToHttps(content))
-
-  def testParseCodereviewSettingsContent(self):
-    values = [
-        ['# bleh\n', {}],
-        ['\t# foo : bar\n', {}],
-        ['Foo:bar', {'Foo': 'bar'}],
-        ['Foo:bar:baz\n', {'Foo': 'bar:baz'}],
-        [' Foo : bar ', {'Foo': 'bar'}],
-        [' Foo : bar \n', {'Foo': 'bar'}],
-        ['a:b\n\rc:d\re:f', {'a': 'b', 'c': 'd', 'e': 'f'}],
-        ['an_url:http://value/', {'an_url': 'http://value/'}],
-        [
-          'CODE_REVIEW_SERVER : http://r/s',
-          {'CODE_REVIEW_SERVER': 'https://r/s'}
-        ],
-        ['VIEW_VC:http://r/s', {'VIEW_VC': 'https://r/s'}],
-    ]
-    for content, expected in values:
-      self.assertEqual(
-          expected, gclient_utils.ParseCodereviewSettingsContent(content))
-
-  def testFileRead_Bytes(self):
-    with gclient_utils.temporary_file() as tmp:
-      gclient_utils.FileWrite(
-          tmp, b'foo \xe2\x9c bar', mode='wb', encoding=None)
-      self.assertEqual('foo \ufffd bar', gclient_utils.FileRead(tmp))
-
-  def testFileRead_Unicode(self):
-    with gclient_utils.temporary_file() as tmp:
-      gclient_utils.FileWrite(tmp, 'foo ✔ bar')
-      self.assertEqual('foo ✔ bar', gclient_utils.FileRead(tmp))
-
-  def testTemporaryFile(self):
-    with gclient_utils.temporary_file() as tmp:
-      gclient_utils.FileWrite(tmp, 'test')
-      self.assertEqual('test', gclient_utils.FileRead(tmp))
-    self.assertFalse(os.path.exists(tmp))
+    def testHardToDelete(self):
+        # Use the fact that tearDown will delete the directory to make it hard
+        # to do so.
+        l1 = os.path.join(self.root_dir, 'l1')
+        l2 = os.path.join(l1, 'l2')
+        l3 = os.path.join(l2, 'l3')
+        f3 = os.path.join(l3, 'f3')
+        os.mkdir(l1)
+        os.mkdir(l2)
+        os.mkdir(l3)
+        gclient_utils.FileWrite(f3, 'foo')
+        os.chmod(f3, 0)
+        os.chmod(l3, 0)
+        os.chmod(l2, 0)
+        os.chmod(l1, 0)
+
+    def testUpgradeToHttps(self):
+        values = [
+            ['', ''],
+            [None, None],
+            ['foo', 'https://foo'],
+            ['http://foo', 'https://foo'],
+            ['foo/', 'https://foo/'],
+            ['ssh-svn://foo', 'ssh-svn://foo'],
+            ['ssh-svn://foo/bar/', 'ssh-svn://foo/bar/'],
+            ['codereview.chromium.org', 'https://codereview.chromium.org'],
+            ['codereview.chromium.org/', 'https://codereview.chromium.org/'],
+            [
+                'chromium-review.googlesource.com',
+                'https://chromium-review.googlesource.com'
+            ],
+            [
+                'chromium-review.googlesource.com/',
+                'https://chromium-review.googlesource.com/'
+            ],
+            ['http://foo:10000', 'http://foo:10000'],
+            ['http://foo:10000/bar', 'http://foo:10000/bar'],
+            ['foo:10000', 'http://foo:10000'],
+            ['foo:', 'https://foo:'],
+        ]
+        for content, expected in values:
+            self.assertEqual(expected, gclient_utils.UpgradeToHttps(content))
+
+    def testParseCodereviewSettingsContent(self):
+        values = [
+            ['# bleh\n', {}],
+            ['\t# foo : bar\n', {}],
+            ['Foo:bar', {
+                'Foo': 'bar'
+            }],
+            ['Foo:bar:baz\n', {
+                'Foo': 'bar:baz'
+            }],
+            [' Foo : bar ', {
+                'Foo': 'bar'
+            }],
+            [' Foo : bar \n', {
+                'Foo': 'bar'
+            }],
+            ['a:b\n\rc:d\re:f', {
+                'a': 'b',
+                'c': 'd',
+                'e': 'f'
+            }],
+            ['an_url:http://value/', {
+                'an_url': 'http://value/'
+            }],
+            [
+                'CODE_REVIEW_SERVER : http://r/s', {
+                    'CODE_REVIEW_SERVER': 'https://r/s'
+                }
+            ],
+            ['VIEW_VC:http://r/s', {
+                'VIEW_VC': 'https://r/s'
+            }],
+        ]
+        for content, expected in values:
+            self.assertEqual(
+                expected, gclient_utils.ParseCodereviewSettingsContent(content))
+
+    def testFileRead_Bytes(self):
+        with gclient_utils.temporary_file() as tmp:
+            gclient_utils.FileWrite(tmp,
+                                    b'foo \xe2\x9c bar',
+                                    mode='wb',
+                                    encoding=None)
+            self.assertEqual('foo \ufffd bar', gclient_utils.FileRead(tmp))
+
+    def testFileRead_Unicode(self):
+        with gclient_utils.temporary_file() as tmp:
+            gclient_utils.FileWrite(tmp, 'foo ✔ bar')
+            self.assertEqual('foo ✔ bar', gclient_utils.FileRead(tmp))
+
+    def testTemporaryFile(self):
+        with gclient_utils.temporary_file() as tmp:
+            gclient_utils.FileWrite(tmp, 'test')
+            self.assertEqual('test', gclient_utils.FileRead(tmp))
+        self.assertFalse(os.path.exists(tmp))
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()
 
 # vim: ts=2:sw=2:tw=80:et:

+ 140 - 138
tests/gerrit_client_test.py

@@ -17,144 +17,146 @@ import gerrit_client
 
 
 class TestGerritClient(unittest.TestCase):
-  @mock.patch('gerrit_util.GetGerritBranch', return_value='')
-  def test_branch_info(self, util_mock):
-    gerrit_client.main([
-        'branchinfo', '--host', 'https://example.org/foo', '--project',
-        'projectname', '--branch', 'branchname'
-    ])
-    util_mock.assert_called_once_with('example.org', 'projectname',
-                                      'branchname')
-
-  @mock.patch('gerrit_util.CreateGerritBranch', return_value='')
-  def test_branch(self, util_mock):
-    gerrit_client.main([
-        'branch', '--host', 'https://example.org/foo', '--project',
-        'projectname', '--branch', 'branchname', '--commit', 'commitname'
-    ])
-    util_mock.assert_called_once_with('example.org', 'projectname',
-                                      'branchname', 'commitname')
-
-  @mock.patch('gerrit_util.QueryChanges', return_value='')
-  def test_changes(self, util_mock):
-    gerrit_client.main([
-        'changes', '--host', 'https://example.org/foo', '-p', 'foo=bar', '-p',
-        'baz=qux', '--limit', '10', '--start', '20', '-o', 'op1', '-o', 'op2'
-    ])
-    util_mock.assert_called_once_with('example.org', [('foo', 'bar'),
-                                                      ('baz', 'qux')],
-                                      first_param=None,
-                                      limit=10,
-                                      start=20,
-                                      o_params=['op1', 'op2'])
-
-  @mock.patch('gerrit_util.QueryChanges', return_value='')
-  def test_changes_query(self, util_mock):
-    gerrit_client.main([
-        'changes',
-        '--host',
-        'https://example.org/foo',
-        '--query',
-        'is:owner is:open',
-        '--limit',
-        '10',
-        '--start',
-        '20',
-    ])
-    util_mock.assert_called_once_with('example.org', [],
-                                      first_param='is:owner is:open',
-                                      limit=10,
-                                      start=20,
-                                      o_params=None)
-
-  @mock.patch('gerrit_util.QueryChanges', return_value='')
-  def test_changes_params_query(self, util_mock):
-    gerrit_client.main([
-        'changes',
-        '--host',
-        'https://example.org/foo',
-        '--query',
-        'is:owner is:open',
-        '-p',
-        'foo=bar',
-        '--limit',
-        '10',
-        '--start',
-        '20',
-    ])
-    util_mock.assert_called_once_with('example.org', [('foo', 'bar')],
-                                      first_param='is:owner is:open',
-                                      limit=10,
-                                      start=20,
-                                      o_params=None)
-
-  @mock.patch('gerrit_util.GetRelatedChanges', return_value='')
-  def test_relatedchanges(self, util_mock):
-    gerrit_client.main([
-        'relatedchanges', '--host', 'https://example.org/foo', '--change',
-        'foo-change-id', '--revision', 'foo-revision-id'
-    ])
-    util_mock.assert_called_once_with('example.org',
-                                      change='foo-change-id',
-                                      revision='foo-revision-id')
-
-  @mock.patch('gerrit_util.CreateChange', return_value={})
-  def test_createchange(self, util_mock):
-    gerrit_client.main([
-        'createchange', '--host', 'https://example.org/foo', '--project',
-        'project', '--branch', 'main', '--subject', 'subject', '-p',
-        'work_in_progress=true'
-    ])
-    util_mock.assert_called_once_with('example.org',
-                                      'project',
-                                      branch='main',
-                                      subject='subject',
-                                      params=[('work_in_progress', 'true')])
-
-  @mock.patch('builtins.open', mock.mock_open())
-  @mock.patch('gerrit_util.ChangeEdit', return_value='')
-  def test_changeedit(self, util_mock):
-    open().read.return_value = 'test_data'
-    gerrit_client.main([
-        'changeedit', '--host', 'https://example.org/foo', '--change', '1',
-        '--path', 'path/to/file', '--file', '/my/foo'
-    ])
-    util_mock.assert_called_once_with('example.org', 1, 'path/to/file',
-                                      'test_data')
-
-  @mock.patch('gerrit_util.PublishChangeEdit', return_value='')
-  def test_publishchangeedit(self, util_mock):
-    gerrit_client.main([
-        'publishchangeedit', '--host', 'https://example.org/foo', '--change',
-        '1', '--notify', 'yes'
-    ])
-    util_mock.assert_called_once_with('example.org', 1, 'yes')
-
-  @mock.patch('gerrit_util.AbandonChange', return_value='')
-  def test_abandon(self, util_mock):
-    gerrit_client.main([
-        'abandon', '--host', 'https://example.org/foo', '-c', '1', '-m', 'bar'
-    ])
-    util_mock.assert_called_once_with('example.org', 1, 'bar')
-
-  @mock.patch('gerrit_util.SetReview', return_value='')
-  def test_setlabel(self, util_mock):
-    gerrit_client.main([
-        'setlabel',
-        '--host',
-        'https://example.org/foo',
-        '-c',
-        '1',
-        '-l',
-        'some-label',
-        '-2',
-    ])
-    util_mock.assert_called_once_with('example.org',
-                                      1,
-                                      labels={'some-label': '-2'})
+    @mock.patch('gerrit_util.GetGerritBranch', return_value='')
+    def test_branch_info(self, util_mock):
+        gerrit_client.main([
+            'branchinfo', '--host', 'https://example.org/foo', '--project',
+            'projectname', '--branch', 'branchname'
+        ])
+        util_mock.assert_called_once_with('example.org', 'projectname',
+                                          'branchname')
+
+    @mock.patch('gerrit_util.CreateGerritBranch', return_value='')
+    def test_branch(self, util_mock):
+        gerrit_client.main([
+            'branch', '--host', 'https://example.org/foo', '--project',
+            'projectname', '--branch', 'branchname', '--commit', 'commitname'
+        ])
+        util_mock.assert_called_once_with('example.org', 'projectname',
+                                          'branchname', 'commitname')
+
+    @mock.patch('gerrit_util.QueryChanges', return_value='')
+    def test_changes(self, util_mock):
+        gerrit_client.main([
+            'changes', '--host', 'https://example.org/foo', '-p', 'foo=bar',
+            '-p', 'baz=qux', '--limit', '10', '--start', '20', '-o', 'op1',
+            '-o', 'op2'
+        ])
+        util_mock.assert_called_once_with('example.org', [('foo', 'bar'),
+                                                          ('baz', 'qux')],
+                                          first_param=None,
+                                          limit=10,
+                                          start=20,
+                                          o_params=['op1', 'op2'])
+
+    @mock.patch('gerrit_util.QueryChanges', return_value='')
+    def test_changes_query(self, util_mock):
+        gerrit_client.main([
+            'changes',
+            '--host',
+            'https://example.org/foo',
+            '--query',
+            'is:owner is:open',
+            '--limit',
+            '10',
+            '--start',
+            '20',
+        ])
+        util_mock.assert_called_once_with('example.org', [],
+                                          first_param='is:owner is:open',
+                                          limit=10,
+                                          start=20,
+                                          o_params=None)
+
+    @mock.patch('gerrit_util.QueryChanges', return_value='')
+    def test_changes_params_query(self, util_mock):
+        gerrit_client.main([
+            'changes',
+            '--host',
+            'https://example.org/foo',
+            '--query',
+            'is:owner is:open',
+            '-p',
+            'foo=bar',
+            '--limit',
+            '10',
+            '--start',
+            '20',
+        ])
+        util_mock.assert_called_once_with('example.org', [('foo', 'bar')],
+                                          first_param='is:owner is:open',
+                                          limit=10,
+                                          start=20,
+                                          o_params=None)
+
+    @mock.patch('gerrit_util.GetRelatedChanges', return_value='')
+    def test_relatedchanges(self, util_mock):
+        gerrit_client.main([
+            'relatedchanges', '--host', 'https://example.org/foo', '--change',
+            'foo-change-id', '--revision', 'foo-revision-id'
+        ])
+        util_mock.assert_called_once_with('example.org',
+                                          change='foo-change-id',
+                                          revision='foo-revision-id')
+
+    @mock.patch('gerrit_util.CreateChange', return_value={})
+    def test_createchange(self, util_mock):
+        gerrit_client.main([
+            'createchange', '--host', 'https://example.org/foo', '--project',
+            'project', '--branch', 'main', '--subject', 'subject', '-p',
+            'work_in_progress=true'
+        ])
+        util_mock.assert_called_once_with('example.org',
+                                          'project',
+                                          branch='main',
+                                          subject='subject',
+                                          params=[('work_in_progress', 'true')])
+
+    @mock.patch('builtins.open', mock.mock_open())
+    @mock.patch('gerrit_util.ChangeEdit', return_value='')
+    def test_changeedit(self, util_mock):
+        open().read.return_value = 'test_data'
+        gerrit_client.main([
+            'changeedit', '--host', 'https://example.org/foo', '--change', '1',
+            '--path', 'path/to/file', '--file', '/my/foo'
+        ])
+        util_mock.assert_called_once_with('example.org', 1, 'path/to/file',
+                                          'test_data')
+
+    @mock.patch('gerrit_util.PublishChangeEdit', return_value='')
+    def test_publishchangeedit(self, util_mock):
+        gerrit_client.main([
+            'publishchangeedit', '--host', 'https://example.org/foo',
+            '--change', '1', '--notify', 'yes'
+        ])
+        util_mock.assert_called_once_with('example.org', 1, 'yes')
+
+    @mock.patch('gerrit_util.AbandonChange', return_value='')
+    def test_abandon(self, util_mock):
+        gerrit_client.main([
+            'abandon', '--host', 'https://example.org/foo', '-c', '1', '-m',
+            'bar'
+        ])
+        util_mock.assert_called_once_with('example.org', 1, 'bar')
+
+    @mock.patch('gerrit_util.SetReview', return_value='')
+    def test_setlabel(self, util_mock):
+        gerrit_client.main([
+            'setlabel',
+            '--host',
+            'https://example.org/foo',
+            '-c',
+            '1',
+            '-l',
+            'some-label',
+            '-2',
+        ])
+        util_mock.assert_called_once_with('example.org',
+                                          1,
+                                          labels={'some-label': '-2'})
 
 
 if __name__ == '__main__':
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  unittest.main()
+    logging.basicConfig(
+        level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
+    unittest.main()

+ 515 - 461
tests/gerrit_util_test.py

@@ -21,478 +21,532 @@ import subprocess2
 
 
 class CookiesAuthenticatorTest(unittest.TestCase):
-  _GITCOOKIES = '\n'.join([
-      '\t'.join([
-          'chromium.googlesource.com',
-          'FALSE',
-          '/',
-          'TRUE',
-          '2147483647',
-          'o',
-          'git-user.chromium.org=1/chromium-secret',
-      ]),
-      '\t'.join([
-          'chromium-review.googlesource.com',
-          'FALSE',
-          '/',
-          'TRUE',
-          '2147483647',
-          'o',
-          'git-user.chromium.org=1/chromium-secret',
-      ]),
-      '\t'.join([
-          '.example.com',
-          'FALSE',
-          '/',
-          'TRUE',
-          '2147483647',
-          'o',
-          'example-bearer-token',
-      ]),
-      '\t'.join([
-          'another-path.example.com',
-          'FALSE',
-          '/foo',
-          'TRUE',
-          '2147483647',
-          'o',
-          'git-example.com=1/another-path-secret',
-      ]),
-      '\t'.join([
-          'another-key.example.com',
-          'FALSE',
-          '/',
-          'TRUE',
-          '2147483647',
-          'not-o',
-          'git-example.com=1/another-key-secret',
-      ]),
-      '#' + '\t'.join([
-          'chromium-review.googlesource.com',
-          'FALSE',
-          '/',
-          'TRUE',
-          '2147483647',
-          'o',
-          'git-invalid-user.chromium.org=1/invalid-chromium-secret',
-      ]),
-      'Some unrelated line\t that should not be here',
-  ])
-
-  def setUp(self):
-    mock.patch('gclient_utils.FileRead', return_value=self._GITCOOKIES).start()
-    mock.patch('os.getenv', return_value={}).start()
-    mock.patch('os.environ', {'HOME': '$HOME'}).start()
-    mock.patch('os.path.exists', return_value=True).start()
-    mock.patch(
-        'subprocess2.check_output',
-        side_effect=[
-            subprocess2.CalledProcessError(1, ['cmd'], 'cwd', 'out', 'err')],
-    ).start()
-    self.addCleanup(mock.patch.stopall)
-    self.maxDiff = None
-
-  def testGetNewPasswordUrl(self):
-    auth = gerrit_util.CookiesAuthenticator()
-    self.assertEqual('https://chromium.googlesource.com/new-password',
-                     auth.get_new_password_url('chromium.googlesource.com'))
-    self.assertEqual(
-        'https://chrome-internal.googlesource.com/new-password',
-        auth.get_new_password_url('chrome-internal-review.googlesource.com'))
-
-  def testGetNewPasswordMessage(self):
-    auth = gerrit_util.CookiesAuthenticator()
-    self.assertIn(
-        'https://chromium.googlesource.com/new-password',
-        auth.get_new_password_message('chromium-review.googlesource.com'))
-    self.assertIn(
-        'https://chrome-internal.googlesource.com/new-password',
-        auth.get_new_password_message('chrome-internal.googlesource.com'))
-
-  def testGetGitcookiesPath(self):
-    self.assertEqual(
-        os.path.expanduser(os.path.join('~', '.gitcookies')),
-        gerrit_util.CookiesAuthenticator().get_gitcookies_path())
-
-    subprocess2.check_output.side_effect = [b'http.cookiefile']
-    self.assertEqual(
-        'http.cookiefile',
-        gerrit_util.CookiesAuthenticator().get_gitcookies_path())
-    subprocess2.check_output.assert_called_with(
-        ['git', 'config', '--path', 'http.cookiefile'])
-
-    os.getenv.return_value = 'git-cookies-path'
-    self.assertEqual(
-        'git-cookies-path',
-        gerrit_util.CookiesAuthenticator().get_gitcookies_path())
-    os.getenv.assert_called_with('GIT_COOKIES_PATH')
-
-  def testGitcookies(self):
-    auth = gerrit_util.CookiesAuthenticator()
-    self.assertEqual(auth.gitcookies, {
-        'chromium.googlesource.com':
-            ('git-user.chromium.org', '1/chromium-secret'),
-        'chromium-review.googlesource.com':
-            ('git-user.chromium.org', '1/chromium-secret'),
-        '.example.com':
-            ('', 'example-bearer-token'),
-    })
-
-  def testGetAuthHeader(self):
-    expected_chromium_header = (
-        'Basic Z2l0LXVzZXIuY2hyb21pdW0ub3JnOjEvY2hyb21pdW0tc2VjcmV0')
-
-    auth = gerrit_util.CookiesAuthenticator()
-    self.assertEqual(
-        expected_chromium_header,
-        auth.get_auth_header('chromium.googlesource.com'))
-    self.assertEqual(
-        expected_chromium_header,
-        auth.get_auth_header('chromium-review.googlesource.com'))
-    self.assertEqual(
-        'Bearer example-bearer-token',
-        auth.get_auth_header('some-review.example.com'))
-
-  def testGetAuthEmail(self):
-    auth = gerrit_util.CookiesAuthenticator()
-    self.assertEqual(
-        'user@chromium.org',
-        auth.get_auth_email('chromium.googlesource.com'))
-    self.assertEqual(
-        'user@chromium.org',
-        auth.get_auth_email('chromium-review.googlesource.com'))
-    self.assertIsNone(auth.get_auth_email('some-review.example.com'))
+    _GITCOOKIES = '\n'.join([
+        '\t'.join([
+            'chromium.googlesource.com',
+            'FALSE',
+            '/',
+            'TRUE',
+            '2147483647',
+            'o',
+            'git-user.chromium.org=1/chromium-secret',
+        ]),
+        '\t'.join([
+            'chromium-review.googlesource.com',
+            'FALSE',
+            '/',
+            'TRUE',
+            '2147483647',
+            'o',
+            'git-user.chromium.org=1/chromium-secret',
+        ]),
+        '\t'.join([
+            '.example.com',
+            'FALSE',
+            '/',
+            'TRUE',
+            '2147483647',
+            'o',
+            'example-bearer-token',
+        ]),
+        '\t'.join([
+            'another-path.example.com',
+            'FALSE',
+            '/foo',
+            'TRUE',
+            '2147483647',
+            'o',
+            'git-example.com=1/another-path-secret',
+        ]),
+        '\t'.join([
+            'another-key.example.com',
+            'FALSE',
+            '/',
+            'TRUE',
+            '2147483647',
+            'not-o',
+            'git-example.com=1/another-key-secret',
+        ]),
+        '#' + '\t'.join([
+            'chromium-review.googlesource.com',
+            'FALSE',
+            '/',
+            'TRUE',
+            '2147483647',
+            'o',
+            'git-invalid-user.chromium.org=1/invalid-chromium-secret',
+        ]),
+        'Some unrelated line\t that should not be here',
+    ])
+
+    def setUp(self):
+        mock.patch('gclient_utils.FileRead',
+                   return_value=self._GITCOOKIES).start()
+        mock.patch('os.getenv', return_value={}).start()
+        mock.patch('os.environ', {'HOME': '$HOME'}).start()
+        mock.patch('os.path.exists', return_value=True).start()
+        mock.patch(
+            'subprocess2.check_output',
+            side_effect=[
+                subprocess2.CalledProcessError(1, ['cmd'], 'cwd', 'out', 'err')
+            ],
+        ).start()
+        self.addCleanup(mock.patch.stopall)
+        self.maxDiff = None
+
+    def testGetNewPasswordUrl(self):
+        auth = gerrit_util.CookiesAuthenticator()
+        self.assertEqual('https://chromium.googlesource.com/new-password',
+                         auth.get_new_password_url('chromium.googlesource.com'))
+        self.assertEqual(
+            'https://chrome-internal.googlesource.com/new-password',
+            auth.get_new_password_url(
+                'chrome-internal-review.googlesource.com'))
+
+    def testGetNewPasswordMessage(self):
+        auth = gerrit_util.CookiesAuthenticator()
+        self.assertIn(
+            'https://chromium.googlesource.com/new-password',
+            auth.get_new_password_message('chromium-review.googlesource.com'))
+        self.assertIn(
+            'https://chrome-internal.googlesource.com/new-password',
+            auth.get_new_password_message('chrome-internal.googlesource.com'))
+
+    def testGetGitcookiesPath(self):
+        self.assertEqual(
+            os.path.expanduser(os.path.join('~', '.gitcookies')),
+            gerrit_util.CookiesAuthenticator().get_gitcookies_path())
+
+        subprocess2.check_output.side_effect = [b'http.cookiefile']
+        self.assertEqual(
+            'http.cookiefile',
+            gerrit_util.CookiesAuthenticator().get_gitcookies_path())
+        subprocess2.check_output.assert_called_with(
+            ['git', 'config', '--path', 'http.cookiefile'])
+
+        os.getenv.return_value = 'git-cookies-path'
+        self.assertEqual(
+            'git-cookies-path',
+            gerrit_util.CookiesAuthenticator().get_gitcookies_path())
+        os.getenv.assert_called_with('GIT_COOKIES_PATH')
+
+    def testGitcookies(self):
+        auth = gerrit_util.CookiesAuthenticator()
+        self.assertEqual(
+            auth.gitcookies, {
+                'chromium.googlesource.com':
+                ('git-user.chromium.org', '1/chromium-secret'),
+                'chromium-review.googlesource.com':
+                ('git-user.chromium.org', '1/chromium-secret'),
+                '.example.com': ('', 'example-bearer-token'),
+            })
+
+    def testGetAuthHeader(self):
+        expected_chromium_header = (
+            'Basic Z2l0LXVzZXIuY2hyb21pdW0ub3JnOjEvY2hyb21pdW0tc2VjcmV0')
+
+        auth = gerrit_util.CookiesAuthenticator()
+        self.assertEqual(expected_chromium_header,
+                         auth.get_auth_header('chromium.googlesource.com'))
+        self.assertEqual(
+            expected_chromium_header,
+            auth.get_auth_header('chromium-review.googlesource.com'))
+        self.assertEqual('Bearer example-bearer-token',
+                         auth.get_auth_header('some-review.example.com'))
+
+    def testGetAuthEmail(self):
+        auth = gerrit_util.CookiesAuthenticator()
+        self.assertEqual('user@chromium.org',
+                         auth.get_auth_email('chromium.googlesource.com'))
+        self.assertEqual(
+            'user@chromium.org',
+            auth.get_auth_email('chromium-review.googlesource.com'))
+        self.assertIsNone(auth.get_auth_email('some-review.example.com'))
 
 
 class GceAuthenticatorTest(unittest.TestCase):
-  def setUp(self):
-    super(GceAuthenticatorTest, self).setUp()
-    mock.patch('httplib2.Http').start()
-    mock.patch('os.getenv', return_value=None).start()
-    mock.patch('gerrit_util.time_sleep').start()
-    mock.patch('gerrit_util.time_time').start()
-    self.addCleanup(mock.patch.stopall)
-    # GceAuthenticator has class variables that cache the results. Build a new
-    # class for every test to avoid inter-test dependencies.
-    class GceAuthenticator(gerrit_util.GceAuthenticator):
-      pass
-    self.GceAuthenticator = GceAuthenticator
-
-  def testIsGce_EnvVarSkip(self, *_mocks):
-    os.getenv.return_value = '1'
-    self.assertFalse(self.GceAuthenticator.is_gce())
-    os.getenv.assert_called_once_with('SKIP_GCE_AUTH_FOR_GIT')
-
-  def testIsGce_Error(self):
-    httplib2.Http().request.side_effect = httplib2.HttpLib2Error
-    self.assertFalse(self.GceAuthenticator.is_gce())
-
-  def testIsGce_500(self):
-    httplib2.Http().request.return_value = (mock.Mock(status=500), None)
-    self.assertFalse(self.GceAuthenticator.is_gce())
-    last_call = gerrit_util.time_sleep.mock_calls[-1]
-    self.assertLessEqual(last_call, mock.call(43.0))
-
-  def testIsGce_FailsThenSucceeds(self):
-    response = mock.Mock(status=200)
-    response.get.return_value = 'Google'
-    httplib2.Http().request.side_effect = [
-        (mock.Mock(status=500), None),
-        (response, 'who cares'),
-    ]
-    self.assertTrue(self.GceAuthenticator.is_gce())
-
-  def testIsGce_MetadataFlavorIsNotGoogle(self):
-    response = mock.Mock(status=200)
-    response.get.return_value = None
-    httplib2.Http().request.return_value = (response, 'who cares')
-    self.assertFalse(self.GceAuthenticator.is_gce())
-    response.get.assert_called_once_with('metadata-flavor')
-
-  def testIsGce_ResultIsCached(self):
-    response = mock.Mock(status=200)
-    response.get.return_value = 'Google'
-    httplib2.Http().request.side_effect = [(response, 'who cares')]
-    self.assertTrue(self.GceAuthenticator.is_gce())
-    self.assertTrue(self.GceAuthenticator.is_gce())
-    httplib2.Http().request.assert_called_once()
-
-  def testGetAuthHeader_Error(self):
-    httplib2.Http().request.side_effect = httplib2.HttpLib2Error
-    self.assertIsNone(self.GceAuthenticator().get_auth_header(''))
-
-  def testGetAuthHeader_500(self):
-    httplib2.Http().request.return_value = (mock.Mock(status=500), None)
-    self.assertIsNone(self.GceAuthenticator().get_auth_header(''))
-
-  def testGetAuthHeader_Non200(self):
-    httplib2.Http().request.return_value = (mock.Mock(status=403), None)
-    self.assertIsNone(self.GceAuthenticator().get_auth_header(''))
-
-  def testGetAuthHeader_OK(self):
-    httplib2.Http().request.return_value = (
-        mock.Mock(status=200),
-        '{"expires_in": 125, "token_type": "TYPE", "access_token": "TOKEN"}')
-    gerrit_util.time_time.return_value = 0
-    self.assertEqual('TYPE TOKEN', self.GceAuthenticator().get_auth_header(''))
-
-  def testGetAuthHeader_Cache(self):
-    httplib2.Http().request.return_value = (
-        mock.Mock(status=200),
-        '{"expires_in": 125, "token_type": "TYPE", "access_token": "TOKEN"}')
-    gerrit_util.time_time.return_value = 0
-    self.assertEqual('TYPE TOKEN', self.GceAuthenticator().get_auth_header(''))
-    self.assertEqual('TYPE TOKEN', self.GceAuthenticator().get_auth_header(''))
-    httplib2.Http().request.assert_called_once()
-
-  def testGetAuthHeader_CacheOld(self):
-    httplib2.Http().request.return_value = (
-        mock.Mock(status=200),
-        '{"expires_in": 125, "token_type": "TYPE", "access_token": "TOKEN"}')
-    gerrit_util.time_time.side_effect = [0, 100, 200]
-    self.assertEqual('TYPE TOKEN', self.GceAuthenticator().get_auth_header(''))
-    self.assertEqual('TYPE TOKEN', self.GceAuthenticator().get_auth_header(''))
-    self.assertEqual(2, len(httplib2.Http().request.mock_calls))
+    def setUp(self):
+        super(GceAuthenticatorTest, self).setUp()
+        mock.patch('httplib2.Http').start()
+        mock.patch('os.getenv', return_value=None).start()
+        mock.patch('gerrit_util.time_sleep').start()
+        mock.patch('gerrit_util.time_time').start()
+        self.addCleanup(mock.patch.stopall)
+
+        # GceAuthenticator has class variables that cache the results. Build a
+        # new class for every test to avoid inter-test dependencies.
+        class GceAuthenticator(gerrit_util.GceAuthenticator):
+            pass
+
+        self.GceAuthenticator = GceAuthenticator
+
+    def testIsGce_EnvVarSkip(self, *_mocks):
+        os.getenv.return_value = '1'
+        self.assertFalse(self.GceAuthenticator.is_gce())
+        os.getenv.assert_called_once_with('SKIP_GCE_AUTH_FOR_GIT')
+
+    def testIsGce_Error(self):
+        httplib2.Http().request.side_effect = httplib2.HttpLib2Error
+        self.assertFalse(self.GceAuthenticator.is_gce())
+
+    def testIsGce_500(self):
+        httplib2.Http().request.return_value = (mock.Mock(status=500), None)
+        self.assertFalse(self.GceAuthenticator.is_gce())
+        last_call = gerrit_util.time_sleep.mock_calls[-1]
+        self.assertLessEqual(last_call, mock.call(43.0))
+
+    def testIsGce_FailsThenSucceeds(self):
+        response = mock.Mock(status=200)
+        response.get.return_value = 'Google'
+        httplib2.Http().request.side_effect = [
+            (mock.Mock(status=500), None),
+            (response, 'who cares'),
+        ]
+        self.assertTrue(self.GceAuthenticator.is_gce())
+
+    def testIsGce_MetadataFlavorIsNotGoogle(self):
+        response = mock.Mock(status=200)
+        response.get.return_value = None
+        httplib2.Http().request.return_value = (response, 'who cares')
+        self.assertFalse(self.GceAuthenticator.is_gce())
+        response.get.assert_called_once_with('metadata-flavor')
+
+    def testIsGce_ResultIsCached(self):
+        response = mock.Mock(status=200)
+        response.get.return_value = 'Google'
+        httplib2.Http().request.side_effect = [(response, 'who cares')]
+        self.assertTrue(self.GceAuthenticator.is_gce())
+        self.assertTrue(self.GceAuthenticator.is_gce())
+        httplib2.Http().request.assert_called_once()
+
+    def testGetAuthHeader_Error(self):
+        httplib2.Http().request.side_effect = httplib2.HttpLib2Error
+        self.assertIsNone(self.GceAuthenticator().get_auth_header(''))
+
+    def testGetAuthHeader_500(self):
+        httplib2.Http().request.return_value = (mock.Mock(status=500), None)
+        self.assertIsNone(self.GceAuthenticator().get_auth_header(''))
+
+    def testGetAuthHeader_Non200(self):
+        httplib2.Http().request.return_value = (mock.Mock(status=403), None)
+        self.assertIsNone(self.GceAuthenticator().get_auth_header(''))
+
+    def testGetAuthHeader_OK(self):
+        httplib2.Http().request.return_value = (
+            mock.Mock(status=200),
+            '{"expires_in": 125, "token_type": "TYPE", "access_token": "TOKEN"}'
+        )
+        gerrit_util.time_time.return_value = 0
+        self.assertEqual('TYPE TOKEN',
+                         self.GceAuthenticator().get_auth_header(''))
+
+    def testGetAuthHeader_Cache(self):
+        httplib2.Http().request.return_value = (
+            mock.Mock(status=200),
+            '{"expires_in": 125, "token_type": "TYPE", "access_token": "TOKEN"}'
+        )
+        gerrit_util.time_time.return_value = 0
+        self.assertEqual('TYPE TOKEN',
+                         self.GceAuthenticator().get_auth_header(''))
+        self.assertEqual('TYPE TOKEN',
+                         self.GceAuthenticator().get_auth_header(''))
+        httplib2.Http().request.assert_called_once()
+
+    def testGetAuthHeader_CacheOld(self):
+        httplib2.Http().request.return_value = (
+            mock.Mock(status=200),
+            '{"expires_in": 125, "token_type": "TYPE", "access_token": "TOKEN"}'
+        )
+        gerrit_util.time_time.side_effect = [0, 100, 200]
+        self.assertEqual('TYPE TOKEN',
+                         self.GceAuthenticator().get_auth_header(''))
+        self.assertEqual('TYPE TOKEN',
+                         self.GceAuthenticator().get_auth_header(''))
+        self.assertEqual(2, len(httplib2.Http().request.mock_calls))
 
 
 class GerritUtilTest(unittest.TestCase):
-  def setUp(self):
-    super(GerritUtilTest, self).setUp()
-    mock.patch('gerrit_util.LOGGER').start()
-    mock.patch('gerrit_util.time_sleep').start()
-    mock.patch('metrics.collector').start()
-    mock.patch(
-        'metrics_utils.extract_http_metrics',
-        return_value='http_metrics').start()
-    self.addCleanup(mock.patch.stopall)
-
-  def testQueryString(self):
-    self.assertEqual('', gerrit_util._QueryString([]))
-    self.assertEqual(
-        'first%20param%2B', gerrit_util._QueryString([], 'first param+'))
-    self.assertEqual(
-        'key:val+foo:bar',
-         gerrit_util._QueryString([('key', 'val'), ('foo', 'bar')]))
-    self.assertEqual(
-        'first%20param%2B+key:val+foo:bar',
-         gerrit_util._QueryString(
-            [('key', 'val'), ('foo', 'bar')], 'first param+'))
-
-  @mock.patch('gerrit_util.Authenticator')
-  def testCreateHttpConn_Basic(self, mockAuth):
-    mockAuth.get().get_auth_header.return_value = None
-    conn = gerrit_util.CreateHttpConn('host.example.com', 'foo/bar')
-    self.assertEqual('host.example.com', conn.req_host)
-    self.assertEqual({
-        'uri': 'https://host.example.com/foo/bar',
-        'method': 'GET',
-        'headers': {},
-        'body': None,
-    }, conn.req_params)
-
-  @mock.patch('gerrit_util.Authenticator')
-  def testCreateHttpConn_Authenticated(self, mockAuth):
-    mockAuth.get().get_auth_header.return_value = 'Bearer token'
-    conn = gerrit_util.CreateHttpConn(
-        'host.example.com', 'foo/bar', headers={'header': 'value'})
-    self.assertEqual('host.example.com', conn.req_host)
-    self.assertEqual({
-        'uri': 'https://host.example.com/a/foo/bar',
-        'method': 'GET',
-        'headers': {'Authorization': 'Bearer token', 'header': 'value'},
-        'body': None,
-    }, conn.req_params)
-
-  @mock.patch('gerrit_util.Authenticator')
-  def testCreateHttpConn_Body(self, mockAuth):
-    mockAuth.get().get_auth_header.return_value = None
-    conn = gerrit_util.CreateHttpConn(
-        'host.example.com', 'foo/bar', body={'l': [1, 2, 3], 'd': {'k': 'v'}})
-    self.assertEqual('host.example.com', conn.req_host)
-    self.assertEqual({
-        'uri': 'https://host.example.com/foo/bar',
-        'method': 'GET',
-        'headers': {'Content-Type': 'application/json'},
-        'body': '{"d": {"k": "v"}, "l": [1, 2, 3]}',
-    }, conn.req_params)
-
-  def testReadHttpResponse_200(self):
-    conn = mock.Mock()
-    conn.req_params = {'uri': 'uri', 'method': 'method'}
-    conn.request.return_value = (mock.Mock(status=200), b'content\xe2\x9c\x94')
-
-    content = gerrit_util.ReadHttpResponse(conn)
-    self.assertEqual('content✔', content.getvalue())
-    metrics.collector.add_repeated.assert_called_once_with(
-        'http_requests', 'http_metrics')
-
-  def testReadHttpResponse_AuthenticationIssue(self):
-    for status in (302, 401, 403):
-      response = mock.Mock(status=status)
-      response.get.return_value = None
-      conn = mock.Mock(req_params={'uri': 'uri', 'method': 'method'})
-      conn.request.return_value = (response, b'')
-
-      with mock.patch('sys.stdout', StringIO()):
+    def setUp(self):
+        super(GerritUtilTest, self).setUp()
+        mock.patch('gerrit_util.LOGGER').start()
+        mock.patch('gerrit_util.time_sleep').start()
+        mock.patch('metrics.collector').start()
+        mock.patch('metrics_utils.extract_http_metrics',
+                   return_value='http_metrics').start()
+        self.addCleanup(mock.patch.stopall)
+
+    def testQueryString(self):
+        self.assertEqual('', gerrit_util._QueryString([]))
+        self.assertEqual('first%20param%2B',
+                         gerrit_util._QueryString([], 'first param+'))
+        self.assertEqual(
+            'key:val+foo:bar',
+            gerrit_util._QueryString([('key', 'val'), ('foo', 'bar')]))
+        self.assertEqual(
+            'first%20param%2B+key:val+foo:bar',
+            gerrit_util._QueryString([('key', 'val'), ('foo', 'bar')],
+                                     'first param+'))
+
+    @mock.patch('gerrit_util.Authenticator')
+    def testCreateHttpConn_Basic(self, mockAuth):
+        mockAuth.get().get_auth_header.return_value = None
+        conn = gerrit_util.CreateHttpConn('host.example.com', 'foo/bar')
+        self.assertEqual('host.example.com', conn.req_host)
+        self.assertEqual(
+            {
+                'uri': 'https://host.example.com/foo/bar',
+                'method': 'GET',
+                'headers': {},
+                'body': None,
+            }, conn.req_params)
+
+    @mock.patch('gerrit_util.Authenticator')
+    def testCreateHttpConn_Authenticated(self, mockAuth):
+        mockAuth.get().get_auth_header.return_value = 'Bearer token'
+        conn = gerrit_util.CreateHttpConn('host.example.com',
+                                          'foo/bar',
+                                          headers={'header': 'value'})
+        self.assertEqual('host.example.com', conn.req_host)
+        self.assertEqual(
+            {
+                'uri': 'https://host.example.com/a/foo/bar',
+                'method': 'GET',
+                'headers': {
+                    'Authorization': 'Bearer token',
+                    'header': 'value'
+                },
+                'body': None,
+            }, conn.req_params)
+
+    @mock.patch('gerrit_util.Authenticator')
+    def testCreateHttpConn_Body(self, mockAuth):
+        mockAuth.get().get_auth_header.return_value = None
+        conn = gerrit_util.CreateHttpConn('host.example.com',
+                                          'foo/bar',
+                                          body={
+                                              'l': [1, 2, 3],
+                                              'd': {
+                                                  'k': 'v'
+                                              }
+                                          })
+        self.assertEqual('host.example.com', conn.req_host)
+        self.assertEqual(
+            {
+                'uri': 'https://host.example.com/foo/bar',
+                'method': 'GET',
+                'headers': {
+                    'Content-Type': 'application/json'
+                },
+                'body': '{"d": {"k": "v"}, "l": [1, 2, 3]}',
+            }, conn.req_params)
+
+    def testReadHttpResponse_200(self):
+        conn = mock.Mock()
+        conn.req_params = {'uri': 'uri', 'method': 'method'}
+        conn.request.return_value = (mock.Mock(status=200),
+                                     b'content\xe2\x9c\x94')
+
+        content = gerrit_util.ReadHttpResponse(conn)
+        self.assertEqual('content✔', content.getvalue())
+        metrics.collector.add_repeated.assert_called_once_with(
+            'http_requests', 'http_metrics')
+
+    def testReadHttpResponse_AuthenticationIssue(self):
+        for status in (302, 401, 403):
+            response = mock.Mock(status=status)
+            response.get.return_value = None
+            conn = mock.Mock(req_params={'uri': 'uri', 'method': 'method'})
+            conn.request.return_value = (response, b'')
+
+            with mock.patch('sys.stdout', StringIO()):
+                with self.assertRaises(gerrit_util.GerritError) as cm:
+                    gerrit_util.ReadHttpResponse(conn)
+
+                self.assertEqual(status, cm.exception.http_status)
+                self.assertIn('Your Gerrit credentials might be misconfigured',
+                              sys.stdout.getvalue())
+
+    def testReadHttpResponse_ClientError(self):
+        conn = mock.Mock(req_params={'uri': 'uri', 'method': 'method'})
+        conn.request.return_value = (mock.Mock(status=404), b'')
+
+        with self.assertRaises(gerrit_util.GerritError) as cm:
+            gerrit_util.ReadHttpResponse(conn)
+
+        self.assertEqual(404, cm.exception.http_status)
+
+    def readHttpResponse_ServerErrorHelper(self, status):
+        conn = mock.Mock(req_params={'uri': 'uri', 'method': 'method'})
+        conn.request.return_value = (mock.Mock(status=status), b'')
+
         with self.assertRaises(gerrit_util.GerritError) as cm:
-          gerrit_util.ReadHttpResponse(conn)
+            gerrit_util.ReadHttpResponse(conn)
 
         self.assertEqual(status, cm.exception.http_status)
-        self.assertIn(
-            'Your Gerrit credentials might be misconfigured',
-            sys.stdout.getvalue())
-
-  def testReadHttpResponse_ClientError(self):
-    conn = mock.Mock(req_params={'uri': 'uri', 'method': 'method'})
-    conn.request.return_value = (mock.Mock(status=404), b'')
-
-    with self.assertRaises(gerrit_util.GerritError) as cm:
-      gerrit_util.ReadHttpResponse(conn)
-
-    self.assertEqual(404, cm.exception.http_status)
-
-  def readHttpResponse_ServerErrorHelper(self, status):
-    conn = mock.Mock(req_params={'uri': 'uri', 'method': 'method'})
-    conn.request.return_value = (mock.Mock(status=status), b'')
-
-    with self.assertRaises(gerrit_util.GerritError) as cm:
-      gerrit_util.ReadHttpResponse(conn)
-
-    self.assertEqual(status, cm.exception.http_status)
-    self.assertEqual(gerrit_util.TRY_LIMIT, len(conn.request.mock_calls))
-    last_call = gerrit_util.time_sleep.mock_calls[-1]
-    self.assertLessEqual(last_call, mock.call(422.0))
-
-  def testReadHttpResponse_ServerError(self):
-    self.readHttpResponse_ServerErrorHelper(status=404)
-    self.readHttpResponse_ServerErrorHelper(status=409)
-    self.readHttpResponse_ServerErrorHelper(status=429)
-    self.readHttpResponse_ServerErrorHelper(status=500)
-
-  def testReadHttpResponse_ServerErrorAndSuccess(self):
-    conn = mock.Mock(req_params={'uri': 'uri', 'method': 'method'})
-    conn.request.side_effect = [
-        (mock.Mock(status=500), b''),
-        (mock.Mock(status=200), b'content\xe2\x9c\x94'),
-    ]
-
-    self.assertEqual('content✔', gerrit_util.ReadHttpResponse(conn).getvalue())
-    self.assertEqual(2, len(conn.request.mock_calls))
-    gerrit_util.time_sleep.assert_called_once_with(12.0)
-
-  def testReadHttpResponse_TimeoutAndSuccess(self):
-    conn = mock.Mock(req_params={'uri': 'uri', 'method': 'method'})
-    conn.request.side_effect = [
-        socket.timeout('timeout'),
-        (mock.Mock(status=200), b'content\xe2\x9c\x94'),
-    ]
-
-    self.assertEqual('content✔', gerrit_util.ReadHttpResponse(conn).getvalue())
-    self.assertEqual(2, len(conn.request.mock_calls))
-    gerrit_util.time_sleep.assert_called_once_with(12.0)
-
-  def testReadHttpResponse_Expected404(self):
-    conn = mock.Mock()
-    conn.req_params = {'uri': 'uri', 'method': 'method'}
-    conn.request.return_value = (mock.Mock(status=404), b'content\xe2\x9c\x94')
-
-    content = gerrit_util.ReadHttpResponse(conn, (404,))
-    self.assertEqual('', content.getvalue())
-
-  @mock.patch('gerrit_util.ReadHttpResponse')
-  def testReadHttpJsonResponse_NotJSON(self, mockReadHttpResponse):
-    mockReadHttpResponse.return_value = StringIO('not json')
-    with self.assertRaises(gerrit_util.GerritError) as cm:
-      gerrit_util.ReadHttpJsonResponse(None)
-    self.assertEqual(cm.exception.http_status, 200)
-    self.assertEqual(
-        cm.exception.message, '(200) Unexpected json output: not json')
-
-  @mock.patch('gerrit_util.ReadHttpResponse')
-  def testReadHttpJsonResponse_EmptyValue(self, mockReadHttpResponse):
-    mockReadHttpResponse.return_value = StringIO(')]}\'')
-    self.assertIsNone(gerrit_util.ReadHttpJsonResponse(None))
-
-  @mock.patch('gerrit_util.ReadHttpResponse')
-  def testReadHttpJsonResponse_JSON(self, mockReadHttpResponse):
-    expected_value = {'foo': 'bar', 'baz': [1, '2', 3]}
-    mockReadHttpResponse.return_value = StringIO(
-        ')]}\'\n' + json.dumps(expected_value))
-    self.assertEqual(expected_value, gerrit_util.ReadHttpJsonResponse(None))
-
-  @mock.patch('gerrit_util.CreateHttpConn')
-  @mock.patch('gerrit_util.ReadHttpJsonResponse')
-  def testQueryChanges(self, mockJsonResponse, mockCreateHttpConn):
-    gerrit_util.QueryChanges(
-        'host', [('key', 'val'), ('foo', 'bar baz')], 'first param', limit=500,
-        o_params=['PARAM_A', 'PARAM_B'], start='start')
-    mockCreateHttpConn.assert_called_once_with(
-        'host', ('changes/?q=first%20param+key:val+foo:bar+baz'
-                 '&start=start'
-                 '&n=500'
-                 '&o=PARAM_A'
-                 '&o=PARAM_B'),
-        timeout=30.0)
-
-  def testQueryChanges_NoParams(self):
-    self.assertRaises(RuntimeError, gerrit_util.QueryChanges, 'host', [])
-
-  @mock.patch('gerrit_util.QueryChanges')
-  def testGenerateAllChanges(self, mockQueryChanges):
-    mockQueryChanges.side_effect = [
-        # First results page
-        [
-            {'_number': '4'},
-            {'_number': '3'},
-            {'_number': '2', '_more_changes': True},
-        ],
-        # Second results page, there are new changes, so second page includes
-        # some results from the first page.
-        [
-            {'_number': '2'},
-            {'_number': '1'},
-        ],
-        # GenerateAllChanges queries again from the start to get any new
-        # changes (5 in this case).
-        [
-            {'_number': '5'},
-            {'_number': '4'},
-            {'_number': '3', '_more_changes': True},
-
-        ],
-    ]
-
-    changes = list(gerrit_util.GenerateAllChanges('host', 'params'))
-    self.assertEqual(
-        [
-            {'_number': '4'},
-            {'_number': '3'},
-            {'_number': '2', '_more_changes': True},
-            {'_number': '1'},
-            {'_number': '5'},
-        ],
-        changes)
-    self.assertEqual(
-        [
+        self.assertEqual(gerrit_util.TRY_LIMIT, len(conn.request.mock_calls))
+        last_call = gerrit_util.time_sleep.mock_calls[-1]
+        self.assertLessEqual(last_call, mock.call(422.0))
+
+    def testReadHttpResponse_ServerError(self):
+        self.readHttpResponse_ServerErrorHelper(status=404)
+        self.readHttpResponse_ServerErrorHelper(status=409)
+        self.readHttpResponse_ServerErrorHelper(status=429)
+        self.readHttpResponse_ServerErrorHelper(status=500)
+
+    def testReadHttpResponse_ServerErrorAndSuccess(self):
+        conn = mock.Mock(req_params={'uri': 'uri', 'method': 'method'})
+        conn.request.side_effect = [
+            (mock.Mock(status=500), b''),
+            (mock.Mock(status=200), b'content\xe2\x9c\x94'),
+        ]
+
+        self.assertEqual('content✔',
+                         gerrit_util.ReadHttpResponse(conn).getvalue())
+        self.assertEqual(2, len(conn.request.mock_calls))
+        gerrit_util.time_sleep.assert_called_once_with(12.0)
+
+    def testReadHttpResponse_TimeoutAndSuccess(self):
+        conn = mock.Mock(req_params={'uri': 'uri', 'method': 'method'})
+        conn.request.side_effect = [
+            socket.timeout('timeout'),
+            (mock.Mock(status=200), b'content\xe2\x9c\x94'),
+        ]
+
+        self.assertEqual('content✔',
+                         gerrit_util.ReadHttpResponse(conn).getvalue())
+        self.assertEqual(2, len(conn.request.mock_calls))
+        gerrit_util.time_sleep.assert_called_once_with(12.0)
+
+    def testReadHttpResponse_Expected404(self):
+        conn = mock.Mock()
+        conn.req_params = {'uri': 'uri', 'method': 'method'}
+        conn.request.return_value = (mock.Mock(status=404),
+                                     b'content\xe2\x9c\x94')
+
+        content = gerrit_util.ReadHttpResponse(conn, (404, ))
+        self.assertEqual('', content.getvalue())
+
+    @mock.patch('gerrit_util.ReadHttpResponse')
+    def testReadHttpJsonResponse_NotJSON(self, mockReadHttpResponse):
+        mockReadHttpResponse.return_value = StringIO('not json')
+        with self.assertRaises(gerrit_util.GerritError) as cm:
+            gerrit_util.ReadHttpJsonResponse(None)
+        self.assertEqual(cm.exception.http_status, 200)
+        self.assertEqual(cm.exception.message,
+                         '(200) Unexpected json output: not json')
+
+    @mock.patch('gerrit_util.ReadHttpResponse')
+    def testReadHttpJsonResponse_EmptyValue(self, mockReadHttpResponse):
+        mockReadHttpResponse.return_value = StringIO(')]}\'')
+        self.assertIsNone(gerrit_util.ReadHttpJsonResponse(None))
+
+    @mock.patch('gerrit_util.ReadHttpResponse')
+    def testReadHttpJsonResponse_JSON(self, mockReadHttpResponse):
+        expected_value = {'foo': 'bar', 'baz': [1, '2', 3]}
+        mockReadHttpResponse.return_value = StringIO(')]}\'\n' +
+                                                     json.dumps(expected_value))
+        self.assertEqual(expected_value, gerrit_util.ReadHttpJsonResponse(None))
+
+    @mock.patch('gerrit_util.CreateHttpConn')
+    @mock.patch('gerrit_util.ReadHttpJsonResponse')
+    def testQueryChanges(self, mockJsonResponse, mockCreateHttpConn):
+        gerrit_util.QueryChanges('host', [('key', 'val'), ('foo', 'bar baz')],
+                                 'first param',
+                                 limit=500,
+                                 o_params=['PARAM_A', 'PARAM_B'],
+                                 start='start')
+        mockCreateHttpConn.assert_called_once_with(
+            'host', ('changes/?q=first%20param+key:val+foo:bar+baz'
+                     '&start=start'
+                     '&n=500'
+                     '&o=PARAM_A'
+                     '&o=PARAM_B'),
+            timeout=30.0)
+
+    def testQueryChanges_NoParams(self):
+        self.assertRaises(RuntimeError, gerrit_util.QueryChanges, 'host', [])
+
+    @mock.patch('gerrit_util.QueryChanges')
+    def testGenerateAllChanges(self, mockQueryChanges):
+        mockQueryChanges.side_effect = [
+            # First results page
+            [
+                {
+                    '_number': '4'
+                },
+                {
+                    '_number': '3'
+                },
+                {
+                    '_number': '2',
+                    '_more_changes': True
+                },
+            ],
+            # Second results page, there are new changes, so second page
+            # includes some results from the first page.
+            [
+                {
+                    '_number': '2'
+                },
+                {
+                    '_number': '1'
+                },
+            ],
+            # GenerateAllChanges queries again from the start to get any new
+            # changes (5 in this case).
+            [
+                {
+                    '_number': '5'
+                },
+                {
+                    '_number': '4'
+                },
+                {
+                    '_number': '3',
+                    '_more_changes': True
+                },
+            ],
+        ]
+
+        changes = list(gerrit_util.GenerateAllChanges('host', 'params'))
+        self.assertEqual([
+            {
+                '_number': '4'
+            },
+            {
+                '_number': '3'
+            },
+            {
+                '_number': '2',
+                '_more_changes': True
+            },
+            {
+                '_number': '1'
+            },
+            {
+                '_number': '5'
+            },
+        ], changes)
+        self.assertEqual([
             mock.call('host', 'params', None, 500, None, 0),
             mock.call('host', 'params', None, 500, None, 3),
             mock.call('host', 'params', None, 500, None, 0),
-        ],
-        mockQueryChanges.mock_calls)
-
-  @mock.patch('gerrit_util.CreateHttpConn')
-  @mock.patch('gerrit_util.ReadHttpJsonResponse')
-  def testIsCodeOwnersEnabledOnRepo_Disabled(
-      self, mockJsonResponse, mockCreateHttpConn):
-    mockJsonResponse.return_value = {'status': {'disabled': True}}
-    self.assertFalse(gerrit_util.IsCodeOwnersEnabledOnRepo('host', 'repo'))
-
-  @mock.patch('gerrit_util.CreateHttpConn')
-  @mock.patch('gerrit_util.ReadHttpJsonResponse')
-  def testIsCodeOwnersEnabledOnRepo_Enabled(
-      self, mockJsonResponse, mockCreateHttpConn):
-    mockJsonResponse.return_value = {'status': {}}
-    self.assertTrue(gerrit_util.IsCodeOwnersEnabledOnRepo('host', 'repo'))
+        ], mockQueryChanges.mock_calls)
+
+    @mock.patch('gerrit_util.CreateHttpConn')
+    @mock.patch('gerrit_util.ReadHttpJsonResponse')
+    def testIsCodeOwnersEnabledOnRepo_Disabled(self, mockJsonResponse,
+                                               mockCreateHttpConn):
+        mockJsonResponse.return_value = {'status': {'disabled': True}}
+        self.assertFalse(gerrit_util.IsCodeOwnersEnabledOnRepo('host', 'repo'))
+
+    @mock.patch('gerrit_util.CreateHttpConn')
+    @mock.patch('gerrit_util.ReadHttpJsonResponse')
+    def testIsCodeOwnersEnabledOnRepo_Enabled(self, mockJsonResponse,
+                                              mockCreateHttpConn):
+        mockJsonResponse.return_value = {'status': {}}
+        self.assertTrue(gerrit_util.IsCodeOwnersEnabledOnRepo('host', 'repo'))
+
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 256 - 251
tests/git_cache_test.py

@@ -2,7 +2,6 @@
 # Copyright 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for git_cache.py"""
 
 from io import StringIO
@@ -21,266 +20,272 @@ sys.path.insert(0, DEPOT_TOOLS_ROOT)
 from testing_support import coverage_utils
 import git_cache
 
+
 class GitCacheTest(unittest.TestCase):
-  def setUp(self):
-    self.cache_dir = tempfile.mkdtemp(prefix='git_cache_test_')
-    self.addCleanup(shutil.rmtree, self.cache_dir, ignore_errors=True)
-    self.origin_dir = tempfile.mkdtemp(suffix='origin.git')
-    self.addCleanup(shutil.rmtree, self.origin_dir, ignore_errors=True)
-    git_cache.Mirror.SetCachePath(self.cache_dir)
-
-    # Ensure git_cache works with safe.bareRepository.
-    mock.patch.dict(
-        'os.environ', {
-            'GIT_CONFIG_GLOBAL': os.path.join(self.cache_dir, '.gitconfig'),
-        }).start()
-    self.addCleanup(mock.patch.stopall)
-    self.git([
-        'config', '--file',
-        os.path.join(self.cache_dir, '.gitconfig'), '--add',
-        'safe.bareRepository', 'explicit'
-    ])
-
-  def git(self, cmd, cwd=None):
-    cwd = cwd or self.origin_dir
-    git = 'git.bat' if sys.platform == 'win32' else 'git'
-    subprocess.check_call([git] + cmd, cwd=cwd)
-
-  def testParseFetchSpec(self):
-    testData = [
-        ([], []),
-        (['main'], [('+refs/heads/main:refs/heads/main',
-                       r'\+refs/heads/main:.*')]),
-        (['main/'], [('+refs/heads/main:refs/heads/main',
-                       r'\+refs/heads/main:.*')]),
-        (['+main'], [('+refs/heads/main:refs/heads/main',
-                       r'\+refs/heads/main:.*')]),
-        (['master'], [('+refs/heads/master:refs/heads/master',
-                       r'\+refs/heads/master:.*')]),
-        (['master/'], [('+refs/heads/master:refs/heads/master',
-                       r'\+refs/heads/master:.*')]),
-        (['+master'], [('+refs/heads/master:refs/heads/master',
-                       r'\+refs/heads/master:.*')]),
-        (['refs/heads/*'], [('+refs/heads/*:refs/heads/*',
-                            r'\+refs/heads/\*:.*')]),
-        (['foo/bar/*', 'baz'], [('+refs/heads/foo/bar/*:refs/heads/foo/bar/*',
+    def setUp(self):
+        self.cache_dir = tempfile.mkdtemp(prefix='git_cache_test_')
+        self.addCleanup(shutil.rmtree, self.cache_dir, ignore_errors=True)
+        self.origin_dir = tempfile.mkdtemp(suffix='origin.git')
+        self.addCleanup(shutil.rmtree, self.origin_dir, ignore_errors=True)
+        git_cache.Mirror.SetCachePath(self.cache_dir)
+
+        # Ensure git_cache works with safe.bareRepository.
+        mock.patch.dict(
+            'os.environ', {
+                'GIT_CONFIG_GLOBAL': os.path.join(self.cache_dir, '.gitconfig'),
+            }).start()
+        self.addCleanup(mock.patch.stopall)
+        self.git([
+            'config', '--file',
+            os.path.join(self.cache_dir, '.gitconfig'), '--add',
+            'safe.bareRepository', 'explicit'
+        ])
+
+    def git(self, cmd, cwd=None):
+        cwd = cwd or self.origin_dir
+        git = 'git.bat' if sys.platform == 'win32' else 'git'
+        subprocess.check_call([git] + cmd, cwd=cwd)
+
+    def testParseFetchSpec(self):
+        testData = [([], []),
+                    (['main'], [('+refs/heads/main:refs/heads/main',
+                                 r'\+refs/heads/main:.*')]),
+                    (['main/'], [('+refs/heads/main:refs/heads/main',
+                                  r'\+refs/heads/main:.*')]),
+                    (['+main'], [('+refs/heads/main:refs/heads/main',
+                                  r'\+refs/heads/main:.*')]),
+                    (['master'], [('+refs/heads/master:refs/heads/master',
+                                   r'\+refs/heads/master:.*')]),
+                    (['master/'], [('+refs/heads/master:refs/heads/master',
+                                    r'\+refs/heads/master:.*')]),
+                    (['+master'], [('+refs/heads/master:refs/heads/master',
+                                    r'\+refs/heads/master:.*')]),
+                    (['refs/heads/*'], [('+refs/heads/*:refs/heads/*',
+                                         r'\+refs/heads/\*:.*')]),
+                    (['foo/bar/*',
+                      'baz'], [('+refs/heads/foo/bar/*:refs/heads/foo/bar/*',
                                 r'\+refs/heads/foo/bar/\*:.*'),
                                ('+refs/heads/baz:refs/heads/baz',
                                 r'\+refs/heads/baz:.*')]),
-        (['refs/foo/*:refs/bar/*'], [('+refs/foo/*:refs/bar/*',
-                                      r'\+refs/foo/\*:.*')])
-        ]
-
-    mirror = git_cache.Mirror('test://phony.example.biz')
-    for fetch_specs, expected in testData:
-      mirror = git_cache.Mirror('test://phony.example.biz', refs=fetch_specs)
-      self.assertEqual(mirror.fetch_specs, set(expected))
-
-  def testPopulate(self):
-    self.git(['init', '-q'])
-    with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
-      f.write('touched\n')
-    self.git(['add', 'foo'])
-    self.git([
-        '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com', 'commit',
-        '-m', 'foo'
-    ])
-
-    mirror = git_cache.Mirror(self.origin_dir)
-    mirror.populate()
-
-  def testPopulateResetFetchConfig(self):
-    self.git(['init', '-q'])
-    with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
-      f.write('touched\n')
-    self.git(['add', 'foo'])
-    self.git([
-        '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com', 'commit',
-        '-m', 'foo'
-    ])
-
-    mirror = git_cache.Mirror(self.origin_dir)
-    mirror.populate()
-
-    # Add a bad refspec to the cache's fetch config.
-    cache_dir = os.path.join(
-        self.cache_dir, mirror.UrlToCacheDir(self.origin_dir))
-    self.git([
-        '--git-dir', cache_dir, 'config', '--add', 'remote.origin.fetch',
-        '+refs/heads/foo:refs/heads/foo'
-    ],
-             cwd=cache_dir)
-
-    mirror.populate(reset_fetch_config=True)
-
-  def testPopulateTwice(self):
-    self.git(['init', '-q'])
-    with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
-      f.write('touched\n')
-    self.git(['add', 'foo'])
-    self.git([
-        '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com', 'commit',
-        '-m', 'foo'
-    ])
-
-    mirror = git_cache.Mirror(self.origin_dir)
-    mirror.populate()
-
-    mirror.populate()
-
-  @mock.patch('sys.stdout', StringIO())
-  def testPruneRequired(self):
-    self.git(['init', '-q'])
-    with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
-      f.write('touched\n')
-    self.git(['checkout', '-b', 'foo'])
-    self.git(['add', 'foo'])
-    self.git([
-        '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com', 'commit',
-        '-m', 'foo'
-    ])
-    mirror = git_cache.Mirror(self.origin_dir)
-    mirror.populate()
-    self.git(['checkout', '-b', 'foo_tmp', 'foo'])
-    self.git(['branch', '-D', 'foo'])
-    self.git(['checkout', '-b', 'foo/bar', 'foo_tmp'])
-    mirror.populate()
-    self.assertNotIn(git_cache.GIT_CACHE_CORRUPT_MESSAGE, sys.stdout.getvalue())
-
-  def _makeGitRepoWithTag(self):
-    self.git(['init', '-q'])
-    with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
-      f.write('touched\n')
-    self.git(['add', 'foo'])
-    self.git([
-        '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com', 'commit',
-        '-m', 'foo'
-    ])
-    self.git(['tag', 'TAG'])
-    self.git(['pack-refs'])
-
-  def testPopulateFetchTagsByDefault(self):
-    self._makeGitRepoWithTag()
-
-    # Default behaviour includes tags.
-    mirror = git_cache.Mirror(self.origin_dir)
-    mirror.populate()
-
-    cache_dir = os.path.join(self.cache_dir,
-                             mirror.UrlToCacheDir(self.origin_dir))
-    self.assertTrue(os.path.exists(cache_dir + '/refs/tags/TAG'))
-
-  def testPopulateFetchWithoutTags(self):
-    self._makeGitRepoWithTag()
-
-    # Ask to not include tags.
-    mirror = git_cache.Mirror(self.origin_dir)
-    mirror.populate(no_fetch_tags=True)
-
-    cache_dir = os.path.join(self.cache_dir,
-                             mirror.UrlToCacheDir(self.origin_dir))
-    self.assertFalse(os.path.exists(cache_dir + '/refs/tags/TAG'))
-
-  def testPopulateResetFetchConfigEmptyFetchConfig(self):
-    self.git(['init', '-q'])
-    with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
-      f.write('touched\n')
-    self.git(['add', 'foo'])
-    self.git([
-        '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com', 'commit',
-        '-m', 'foo'
-    ])
-
-    mirror = git_cache.Mirror(self.origin_dir)
-    mirror.populate(reset_fetch_config=True)
+                    (['refs/foo/*:refs/bar/*'], [('+refs/foo/*:refs/bar/*',
+                                                  r'\+refs/foo/\*:.*')])]
+
+        mirror = git_cache.Mirror('test://phony.example.biz')
+        for fetch_specs, expected in testData:
+            mirror = git_cache.Mirror('test://phony.example.biz',
+                                      refs=fetch_specs)
+            self.assertEqual(mirror.fetch_specs, set(expected))
+
+    def testPopulate(self):
+        self.git(['init', '-q'])
+        with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
+            f.write('touched\n')
+        self.git(['add', 'foo'])
+        self.git([
+            '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com',
+            'commit', '-m', 'foo'
+        ])
+
+        mirror = git_cache.Mirror(self.origin_dir)
+        mirror.populate()
+
+    def testPopulateResetFetchConfig(self):
+        self.git(['init', '-q'])
+        with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
+            f.write('touched\n')
+        self.git(['add', 'foo'])
+        self.git([
+            '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com',
+            'commit', '-m', 'foo'
+        ])
+
+        mirror = git_cache.Mirror(self.origin_dir)
+        mirror.populate()
+
+        # Add a bad refspec to the cache's fetch config.
+        cache_dir = os.path.join(self.cache_dir,
+                                 mirror.UrlToCacheDir(self.origin_dir))
+        self.git([
+            '--git-dir', cache_dir, 'config', '--add', 'remote.origin.fetch',
+            '+refs/heads/foo:refs/heads/foo'
+        ],
+                 cwd=cache_dir)
+
+        mirror.populate(reset_fetch_config=True)
+
+    def testPopulateTwice(self):
+        self.git(['init', '-q'])
+        with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
+            f.write('touched\n')
+        self.git(['add', 'foo'])
+        self.git([
+            '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com',
+            'commit', '-m', 'foo'
+        ])
+
+        mirror = git_cache.Mirror(self.origin_dir)
+        mirror.populate()
+
+        mirror.populate()
+
+    @mock.patch('sys.stdout', StringIO())
+    def testPruneRequired(self):
+        self.git(['init', '-q'])
+        with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
+            f.write('touched\n')
+        self.git(['checkout', '-b', 'foo'])
+        self.git(['add', 'foo'])
+        self.git([
+            '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com',
+            'commit', '-m', 'foo'
+        ])
+        mirror = git_cache.Mirror(self.origin_dir)
+        mirror.populate()
+        self.git(['checkout', '-b', 'foo_tmp', 'foo'])
+        self.git(['branch', '-D', 'foo'])
+        self.git(['checkout', '-b', 'foo/bar', 'foo_tmp'])
+        mirror.populate()
+        self.assertNotIn(git_cache.GIT_CACHE_CORRUPT_MESSAGE,
+                         sys.stdout.getvalue())
+
+    def _makeGitRepoWithTag(self):
+        self.git(['init', '-q'])
+        with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
+            f.write('touched\n')
+        self.git(['add', 'foo'])
+        self.git([
+            '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com',
+            'commit', '-m', 'foo'
+        ])
+        self.git(['tag', 'TAG'])
+        self.git(['pack-refs'])
+
+    def testPopulateFetchTagsByDefault(self):
+        self._makeGitRepoWithTag()
+
+        # Default behaviour includes tags.
+        mirror = git_cache.Mirror(self.origin_dir)
+        mirror.populate()
+
+        cache_dir = os.path.join(self.cache_dir,
+                                 mirror.UrlToCacheDir(self.origin_dir))
+        self.assertTrue(os.path.exists(cache_dir + '/refs/tags/TAG'))
+
+    def testPopulateFetchWithoutTags(self):
+        self._makeGitRepoWithTag()
+
+        # Ask to not include tags.
+        mirror = git_cache.Mirror(self.origin_dir)
+        mirror.populate(no_fetch_tags=True)
+
+        cache_dir = os.path.join(self.cache_dir,
+                                 mirror.UrlToCacheDir(self.origin_dir))
+        self.assertFalse(os.path.exists(cache_dir + '/refs/tags/TAG'))
+
+    def testPopulateResetFetchConfigEmptyFetchConfig(self):
+        self.git(['init', '-q'])
+        with open(os.path.join(self.origin_dir, 'foo'), 'w') as f:
+            f.write('touched\n')
+        self.git(['add', 'foo'])
+        self.git([
+            '-c', 'user.name=Test user', '-c', 'user.email=joj@test.com',
+            'commit', '-m', 'foo'
+        ])
+
+        mirror = git_cache.Mirror(self.origin_dir)
+        mirror.populate(reset_fetch_config=True)
 
 
 class GitCacheDirTest(unittest.TestCase):
-  def setUp(self):
-    try:
-      delattr(git_cache.Mirror, 'cachepath')
-    except AttributeError:
-      pass
-    super(GitCacheDirTest, self).setUp()
-
-  def tearDown(self):
-    try:
-      delattr(git_cache.Mirror, 'cachepath')
-    except AttributeError:
-      pass
-    super(GitCacheDirTest, self).tearDown()
-
-  def test_git_config_read(self):
-    (fd, tmpFile) = tempfile.mkstemp()
-    old = git_cache.Mirror._GIT_CONFIG_LOCATION
-    try:
-      try:
-        os.write(fd, b'[cache]\n  cachepath="hello world"\n')
-      finally:
-        os.close(fd)
-
-      git_cache.Mirror._GIT_CONFIG_LOCATION = ['-f', tmpFile]
-
-      self.assertEqual(git_cache.Mirror.GetCachePath(), 'hello world')
-    finally:
-      git_cache.Mirror._GIT_CONFIG_LOCATION = old
-      os.remove(tmpFile)
-
-  def test_environ_read(self):
-    path = os.environ.get('GIT_CACHE_PATH')
-    config = os.environ.get('GIT_CONFIG')
-    try:
-      os.environ['GIT_CACHE_PATH'] = 'hello world'
-      os.environ['GIT_CONFIG'] = 'disabled'
-
-      self.assertEqual(git_cache.Mirror.GetCachePath(), 'hello world')
-    finally:
-      for name, val in zip(('GIT_CACHE_PATH', 'GIT_CONFIG'), (path, config)):
-        if val is None:
-          os.environ.pop(name, None)
-        else:
-          os.environ[name] = val
-
-  def test_manual_set(self):
-    git_cache.Mirror.SetCachePath('hello world')
-    self.assertEqual(git_cache.Mirror.GetCachePath(), 'hello world')
-
-  def test_unconfigured(self):
-    path = os.environ.get('GIT_CACHE_PATH')
-    config = os.environ.get('GIT_CONFIG')
-    try:
-      os.environ.pop('GIT_CACHE_PATH', None)
-      os.environ['GIT_CONFIG'] = 'disabled'
-
-      with self.assertRaisesRegexp(RuntimeError, 'cache\.cachepath'):
-        git_cache.Mirror.GetCachePath()
-
-      # negatively cached value still raises
-      with self.assertRaisesRegexp(RuntimeError, 'cache\.cachepath'):
-        git_cache.Mirror.GetCachePath()
-    finally:
-      for name, val in zip(('GIT_CACHE_PATH', 'GIT_CONFIG'), (path, config)):
-        if val is None:
-          os.environ.pop(name, None)
-        else:
-          os.environ[name] = val
+    def setUp(self):
+        try:
+            delattr(git_cache.Mirror, 'cachepath')
+        except AttributeError:
+            pass
+        super(GitCacheDirTest, self).setUp()
+
+    def tearDown(self):
+        try:
+            delattr(git_cache.Mirror, 'cachepath')
+        except AttributeError:
+            pass
+        super(GitCacheDirTest, self).tearDown()
+
+    def test_git_config_read(self):
+        (fd, tmpFile) = tempfile.mkstemp()
+        old = git_cache.Mirror._GIT_CONFIG_LOCATION
+        try:
+            try:
+                os.write(fd, b'[cache]\n  cachepath="hello world"\n')
+            finally:
+                os.close(fd)
+
+            git_cache.Mirror._GIT_CONFIG_LOCATION = ['-f', tmpFile]
+
+            self.assertEqual(git_cache.Mirror.GetCachePath(), 'hello world')
+        finally:
+            git_cache.Mirror._GIT_CONFIG_LOCATION = old
+            os.remove(tmpFile)
+
+    def test_environ_read(self):
+        path = os.environ.get('GIT_CACHE_PATH')
+        config = os.environ.get('GIT_CONFIG')
+        try:
+            os.environ['GIT_CACHE_PATH'] = 'hello world'
+            os.environ['GIT_CONFIG'] = 'disabled'
+
+            self.assertEqual(git_cache.Mirror.GetCachePath(), 'hello world')
+        finally:
+            for name, val in zip(('GIT_CACHE_PATH', 'GIT_CONFIG'),
+                                 (path, config)):
+                if val is None:
+                    os.environ.pop(name, None)
+                else:
+                    os.environ[name] = val
+
+    def test_manual_set(self):
+        git_cache.Mirror.SetCachePath('hello world')
+        self.assertEqual(git_cache.Mirror.GetCachePath(), 'hello world')
+
+    def test_unconfigured(self):
+        path = os.environ.get('GIT_CACHE_PATH')
+        config = os.environ.get('GIT_CONFIG')
+        try:
+            os.environ.pop('GIT_CACHE_PATH', None)
+            os.environ['GIT_CONFIG'] = 'disabled'
+
+            with self.assertRaisesRegexp(RuntimeError, 'cache\.cachepath'):
+                git_cache.Mirror.GetCachePath()
+
+            # negatively cached value still raises
+            with self.assertRaisesRegexp(RuntimeError, 'cache\.cachepath'):
+                git_cache.Mirror.GetCachePath()
+        finally:
+            for name, val in zip(('GIT_CACHE_PATH', 'GIT_CONFIG'),
+                                 (path, config)):
+                if val is None:
+                    os.environ.pop(name, None)
+                else:
+                    os.environ[name] = val
 
 
 class MirrorTest(unittest.TestCase):
-  def test_same_cache_for_authenticated_and_unauthenticated_urls(self):
-    # GoB can fetch a repo via two different URLs; if the url contains '/a/'
-    # it forces authenticated access instead of allowing anonymous access,
-    # even in the case where a repo is public. We want this in order to make
-    # sure bots are authenticated and get the right quotas. However, we
-    # only want to maintain a single cache for the repo.
-    self.assertEqual(git_cache.Mirror.UrlToCacheDir(
-        'https://chromium.googlesource.com/a/chromium/src.git'),
-        'chromium.googlesource.com-chromium-src')
+    def test_same_cache_for_authenticated_and_unauthenticated_urls(self):
+        # GoB can fetch a repo via two different URLs; if the url contains '/a/'
+        # it forces authenticated access instead of allowing anonymous access,
+        # even in the case where a repo is public. We want this in order to make
+        # sure bots are authenticated and get the right quotas. However, we
+        # only want to maintain a single cache for the repo.
+        self.assertEqual(
+            git_cache.Mirror.UrlToCacheDir(
+                'https://chromium.googlesource.com/a/chromium/src.git'),
+            'chromium.googlesource.com-chromium-src')
 
 
 if __name__ == '__main__':
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  sys.exit(coverage_utils.covered_main((
-    os.path.join(DEPOT_TOOLS_ROOT, 'git_cache.py')
-  ), required_percentage=0))
+    logging.basicConfig(
+        level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
+    sys.exit(
+        coverage_utils.covered_main(
+            (os.path.join(DEPOT_TOOLS_ROOT, 'git_cache.py')),
+            required_percentage=0))

Plik diff jest za duży
+ 4217 - 4627
tests/git_cl_test.py


Plik diff jest za duży
+ 500 - 492
tests/git_common_test.py


+ 65 - 48
tests/git_dates_test.py

@@ -16,63 +16,80 @@ from testing_support import coverage_utils
 
 
 class GitDatesTestBase(unittest.TestCase):
-  @classmethod
-  def setUpClass(cls):
-    super(GitDatesTestBase, cls).setUpClass()
-    import git_dates
-    cls.git_dates = git_dates
+    @classmethod
+    def setUpClass(cls):
+        super(GitDatesTestBase, cls).setUpClass()
+        import git_dates
+        cls.git_dates = git_dates
 
 
 class GitDatesTest(GitDatesTestBase):
-  def testTimestampOffsetToDatetime(self):
-    # 2016-01-25 06:25:43 UTC
-    timestamp = 1453703143
+    def testTimestampOffsetToDatetime(self):
+        # 2016-01-25 06:25:43 UTC
+        timestamp = 1453703143
 
-    offset = '+1100'
-    expected_tz = self.git_dates.FixedOffsetTZ(datetime.timedelta(hours=11), '')
-    expected = datetime.datetime(2016, 1, 25, 17, 25, 43, tzinfo=expected_tz)
-    result = self.git_dates.timestamp_offset_to_datetime(timestamp, offset)
-    self.assertEqual(expected, result)
-    self.assertEqual(datetime.timedelta(hours=11), result.utcoffset())
-    self.assertEqual('+1100', result.tzname())
-    self.assertEqual(datetime.timedelta(0), result.dst())
+        offset = '+1100'
+        expected_tz = self.git_dates.FixedOffsetTZ(datetime.timedelta(hours=11),
+                                                   '')
+        expected = datetime.datetime(2016,
+                                     1,
+                                     25,
+                                     17,
+                                     25,
+                                     43,
+                                     tzinfo=expected_tz)
+        result = self.git_dates.timestamp_offset_to_datetime(timestamp, offset)
+        self.assertEqual(expected, result)
+        self.assertEqual(datetime.timedelta(hours=11), result.utcoffset())
+        self.assertEqual('+1100', result.tzname())
+        self.assertEqual(datetime.timedelta(0), result.dst())
 
-    offset = '-0800'
-    expected_tz = self.git_dates.FixedOffsetTZ(datetime.timedelta(hours=-8), '')
-    expected = datetime.datetime(2016, 1, 24, 22, 25, 43, tzinfo=expected_tz)
-    result = self.git_dates.timestamp_offset_to_datetime(timestamp, offset)
-    self.assertEqual(expected, result)
-    self.assertEqual(datetime.timedelta(hours=-8), result.utcoffset())
-    self.assertEqual('-0800', result.tzname())
-    self.assertEqual(datetime.timedelta(0), result.dst())
+        offset = '-0800'
+        expected_tz = self.git_dates.FixedOffsetTZ(datetime.timedelta(hours=-8),
+                                                   '')
+        expected = datetime.datetime(2016,
+                                     1,
+                                     24,
+                                     22,
+                                     25,
+                                     43,
+                                     tzinfo=expected_tz)
+        result = self.git_dates.timestamp_offset_to_datetime(timestamp, offset)
+        self.assertEqual(expected, result)
+        self.assertEqual(datetime.timedelta(hours=-8), result.utcoffset())
+        self.assertEqual('-0800', result.tzname())
+        self.assertEqual(datetime.timedelta(0), result.dst())
 
-    # Invalid offset.
-    offset = '-08xx'
-    expected_tz = self.git_dates.FixedOffsetTZ(datetime.timedelta(hours=0), '')
-    expected = datetime.datetime(2016, 1, 25, 6, 25, 43, tzinfo=expected_tz)
-    result = self.git_dates.timestamp_offset_to_datetime(timestamp, offset)
-    self.assertEqual(expected, result)
-    self.assertEqual(datetime.timedelta(hours=0), result.utcoffset())
-    self.assertEqual('UTC', result.tzname())
-    self.assertEqual(datetime.timedelta(0), result.dst())
+        # Invalid offset.
+        offset = '-08xx'
+        expected_tz = self.git_dates.FixedOffsetTZ(datetime.timedelta(hours=0),
+                                                   '')
+        expected = datetime.datetime(2016, 1, 25, 6, 25, 43, tzinfo=expected_tz)
+        result = self.git_dates.timestamp_offset_to_datetime(timestamp, offset)
+        self.assertEqual(expected, result)
+        self.assertEqual(datetime.timedelta(hours=0), result.utcoffset())
+        self.assertEqual('UTC', result.tzname())
+        self.assertEqual(datetime.timedelta(0), result.dst())
 
-    # Offset out of range.
-    offset = '+2400'
-    self.assertRaises(ValueError, self.git_dates.timestamp_offset_to_datetime,
-                      timestamp, offset)
+        # Offset out of range.
+        offset = '+2400'
+        self.assertRaises(ValueError,
+                          self.git_dates.timestamp_offset_to_datetime,
+                          timestamp, offset)
 
-  def testDatetimeString(self):
-    tz = self.git_dates.FixedOffsetTZ(datetime.timedelta(hours=11), '')
-    dt = datetime.datetime(2016, 1, 25, 17, 25, 43, tzinfo=tz)
-    self.assertEqual('2016-01-25 17:25:43 +1100',
-                      self.git_dates.datetime_string(dt))
+    def testDatetimeString(self):
+        tz = self.git_dates.FixedOffsetTZ(datetime.timedelta(hours=11), '')
+        dt = datetime.datetime(2016, 1, 25, 17, 25, 43, tzinfo=tz)
+        self.assertEqual('2016-01-25 17:25:43 +1100',
+                         self.git_dates.datetime_string(dt))
 
-    tz = self.git_dates.FixedOffsetTZ(datetime.timedelta(hours=-8), '')
-    dt = datetime.datetime(2016, 1, 24, 22, 25, 43, tzinfo=tz)
-    self.assertEqual('2016-01-24 22:25:43 -0800',
-                      self.git_dates.datetime_string(dt))
+        tz = self.git_dates.FixedOffsetTZ(datetime.timedelta(hours=-8), '')
+        dt = datetime.datetime(2016, 1, 24, 22, 25, 43, tzinfo=tz)
+        self.assertEqual('2016-01-24 22:25:43 -0800',
+                         self.git_dates.datetime_string(dt))
 
 
 if __name__ == '__main__':
-  sys.exit(coverage_utils.covered_main(
-    os.path.join(DEPOT_TOOLS_ROOT, 'git_dates.py')))
+    sys.exit(
+        coverage_utils.covered_main(
+            os.path.join(DEPOT_TOOLS_ROOT, 'git_dates.py')))

+ 34 - 33
tests/git_find_releases_test.py

@@ -18,39 +18,40 @@ import git_find_releases
 
 
 class TestGitFindReleases(unittest.TestCase):
-  @mock.patch('sys.stdout', StringIO())
-  @mock.patch('git_common.run', return_value='')
-  def test_invalid_commit(self, git_run):
-    result = git_find_releases.main(['foo'])
-    self.assertEqual(1, result)
-    self.assertEqual('foo not found', sys.stdout.getvalue().strip())
-    git_run.assert_called_once_with('name-rev', '--tags', '--name-only', 'foo')
-
-  @mock.patch('sys.stdout', StringIO())
-  @mock.patch('git_common.run')
-  def test_no_merge(self, git_run):
-    def git_run_function(*args):
-      assert len(args) > 1
-      if args[0] == 'name-rev' and args[1] == '--tags':
-        return 'undefined'
-
-      if args[0] == 'name-rev' and args[1] == '--refs':
-        return '1.0.0'
-
-      if args[0] == 'log':
-        return ''
-      assert False, "Unexpected arguments for git.run"
-
-    git_run.side_effect = git_run_function
-    result = git_find_releases.main(['foo'])
-    self.assertEqual(0, result)
-    stdout = sys.stdout.getvalue().strip()
-    self.assertIn('commit foo was', stdout)
-    self.assertIn('No merges found', stdout)
-    self.assertEqual(3, git_run.call_count)
+    @mock.patch('sys.stdout', StringIO())
+    @mock.patch('git_common.run', return_value='')
+    def test_invalid_commit(self, git_run):
+        result = git_find_releases.main(['foo'])
+        self.assertEqual(1, result)
+        self.assertEqual('foo not found', sys.stdout.getvalue().strip())
+        git_run.assert_called_once_with('name-rev', '--tags', '--name-only',
+                                        'foo')
+
+    @mock.patch('sys.stdout', StringIO())
+    @mock.patch('git_common.run')
+    def test_no_merge(self, git_run):
+        def git_run_function(*args):
+            assert len(args) > 1
+            if args[0] == 'name-rev' and args[1] == '--tags':
+                return 'undefined'
+
+            if args[0] == 'name-rev' and args[1] == '--refs':
+                return '1.0.0'
+
+            if args[0] == 'log':
+                return ''
+            assert False, "Unexpected arguments for git.run"
+
+        git_run.side_effect = git_run_function
+        result = git_find_releases.main(['foo'])
+        self.assertEqual(0, result)
+        stdout = sys.stdout.getvalue().strip()
+        self.assertIn('commit foo was', stdout)
+        self.assertIn('No merges found', stdout)
+        self.assertEqual(3, git_run.call_count)
 
 
 if __name__ == '__main__':
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  unittest.main()
+    logging.basicConfig(
+        level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
+    unittest.main()

+ 247 - 243
tests/git_footers_test.py

@@ -1,5 +1,4 @@
 #!/usr/bin/env vpython3
-
 """Tests for git_footers."""
 
 from io import StringIO
@@ -14,255 +13,260 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 import gclient_utils
 import git_footers
 
+
 class GitFootersTest(unittest.TestCase):
-  _message = """
+    _message = """
 This is my commit message. There are many like it, but this one is mine.
 
 My commit message is my best friend. It is my life.
 
 """
 
-  _position = 'refs/heads/main@{#292272}'
-
-  _position_footer = 'Cr-Commit-Position: %s\n' % _position
-
-  def testFootersBasic(self):
-    self.assertEqual(
-        git_footers.split_footers('Not-A: footer'),
-        (['Not-A: footer'], [], []))
-    self.assertEqual(
-        git_footers.split_footers('Header\n\nActual: footer'),
-        (['Header', ''], ['Actual: footer'], [('Actual', 'footer')]))
-    self.assertEqual(
-        git_footers.split_footers('\nActual: footer'),
-        ([''], ['Actual: footer'], [('Actual', 'footer')]))
-    self.assertEqual(
-        git_footers.split_footers('H\n\nBug:\nAlso: footer'),
-        (['H', ''], ['Bug:', 'Also: footer'],
-         [('Bug', ''), ('Also', 'footer')]))
-    self.assertEqual(git_footers.split_footers('H\n\nBug:      '),
-                     (['H', ''], ['Bug:'], [('Bug', '')]))
-    self.assertEqual(git_footers.split_footers('H\n\nBug: 1234     '),
-                     (['H', ''], ['Bug: 1234'], [('Bug', '1234')]))
-    self.assertEqual(
-        git_footers.split_footers('H\n\nBug: 1234\nChange-Id: Ib4321  '),
-        (['H', ''], ['Bug: 1234', 'Change-Id: Ib4321'], [('Bug', '1234'),
-                                                         ('Change-Id', 'Ib4321')
-                                                         ]))
-
-    self.assertEqual(
-        git_footers.parse_footers(self._message), {})
-    self.assertEqual(
-        git_footers.parse_footers(self._message + self._position_footer),
-        { 'Cr-Commit-Position': [ self._position ] })
-    self.assertEqual(
-        git_footers.parse_footers(self._message + self._position_footer
-                                                + self._position_footer),
-        { 'Cr-Commit-Position': [ self._position, self._position ] })
-    self.assertEqual(
-        git_footers.parse_footers(self._message +
-                                  'Bug:\n' +
-                                  self._position_footer),
-        { 'Bug': [''],
-          'Cr-Commit-Position': [ self._position ] })
-
-  def testSkippingBadFooterLines(self):
-    message = ('Title.\n'
-               '\n'
-               'Last: paragraph starts\n'
-               'It-may: contain\n'
-               'bad lines, which should be skipped\n'
-               'For: example\n'
-               '(cherry picked from)\n'
-               'And-only-valid: footers taken')
-    self.assertEqual(git_footers.split_footers(message),
-                     (['Title.',
-                       ''],
-                      ['Last: paragraph starts',
-                       'It-may: contain',
-                       'bad lines, which should be skipped',
-                       'For: example',
-                       '(cherry picked from)',
-                       'And-only-valid: footers taken'],
-                      [('Last', 'paragraph starts'),
-                       ('It-may', 'contain'),
-                       ('For', 'example'),
-                       ('And-only-valid', 'footers taken')]))
-    self.assertEqual(git_footers.parse_footers(message),
-                     {'Last': ['paragraph starts'],
-                      'It-May': ['contain'],
-                      'For': ['example'],
-                      'And-Only-Valid': ['footers taken']})
-
-  def testAvoidingURLs(self):
-    message = ('Someone accidentally put a URL in the footers.\n'
-               '\n'
-               'Followed: by\n'
-               'http://domain.tld\n'
-               'Some: footers')
-    self.assertEqual(git_footers.split_footers(message),
-                     (['Someone accidentally put a URL in the footers.',
-                       ''],
-                      ['Followed: by',
-                       'http://domain.tld',
-                       'Some: footers'],
-                      [('Followed', 'by'),
-                       ('Some', 'footers')]))
-    self.assertEqual(git_footers.parse_footers(message),
-                     {'Followed': ['by'],
-                      'Some': ['footers']})
-
-  def testSplittingLastParagraph(self):
-    message = ('Title.\n'
-               '\n'
-               'The final paragraph has some normal text first.\n'
-               'Followed: by\n'
-               'nonsense trailers and\n'
-               'Some: footers')
-    self.assertEqual(git_footers.split_footers(message),
-                     (['Title.',
-                       '',
-                       'The final paragraph has some normal text first.',
-                       ''],
-                      ['Followed: by',
-                       'nonsense trailers and',
-                       'Some: footers'],
-                      [('Followed', 'by'),
-                       ('Some', 'footers')]))
-    self.assertEqual(git_footers.parse_footers(message),
-                     {'Followed': ['by'],
-                      'Some': ['footers']})
-
-  def testGetFooterChangeId(self):
-    msg = '\n'.join(['whatever',
-                     '',
-                     'Change-Id: ignored',
-                     '',  # Above is ignored because of this empty line.
-                     'Change-Id: Ideadbeaf'])
-    self.assertEqual(['Ideadbeaf'], git_footers.get_footer_change_id(msg))
-    self.assertEqual([], git_footers.get_footer_change_id(
-        'desc\nBUG=not-a-valid-footer\nChange-Id: Ixxx'))
-    self.assertEqual(['Ixxx'], git_footers.get_footer_change_id(
-        'desc\nBUG=not-a-valid-footer\n\nChange-Id: Ixxx'))
-
-  def testAddFooterChangeId(self):
-    with self.assertRaises(AssertionError):
-      git_footers.add_footer_change_id('Already has\n\nChange-Id: Ixxx', 'Izzz')
-
-    self.assertEqual(
-        git_footers.add_footer_change_id('header-only', 'Ixxx'),
-        'header-only\n\nChange-Id: Ixxx')
-
-    self.assertEqual(
-        git_footers.add_footer_change_id('header\n\nsome: footer', 'Ixxx'),
-        'header\n\nsome: footer\nChange-Id: Ixxx')
-
-    self.assertEqual(
-        git_footers.add_footer_change_id('header\n\nBUG: yy', 'Ixxx'),
-        'header\n\nBUG: yy\nChange-Id: Ixxx')
-
-    self.assertEqual(
-        git_footers.add_footer_change_id('header\n\nBUG: yy\nPos: 1', 'Ixxx'),
-        'header\n\nBUG: yy\nChange-Id: Ixxx\nPos: 1')
-
-    self.assertEqual(
-        git_footers.add_footer_change_id('header\n\nBUG: yy\n\nPos: 1', 'Ixxx'),
-        'header\n\nBUG: yy\n\nPos: 1\nChange-Id: Ixxx')
-
-    # Special case: first line is never a footer, even if it looks line one.
-    self.assertEqual(
-        git_footers.add_footer_change_id('header: like footer', 'Ixxx'),
-        'header: like footer\n\nChange-Id: Ixxx')
-
-    self.assertEqual(
-        git_footers.add_footer_change_id('Header.\n\nBug: v8\nN=t\nT=z', 'Ix'),
-        'Header.\n\nBug: v8\nChange-Id: Ix\nN=t\nT=z')
-
-  def testAddFooter(self):
-    with self.assertRaises(ValueError):
-      git_footers.add_footer('', 'Invalid Footer', 'Value')
-
-    self.assertEqual(
-        git_footers.add_footer('', 'Key', 'Value'),
-        '\nKey: Value')
-
-    self.assertEqual(
-        git_footers.add_footer('Header with empty line.\n\n', 'Key', 'Value'),
-        'Header with empty line.\n\nKey: Value')
-
-    self.assertEqual(
-        git_footers.add_footer('Top\n\nSome: footer', 'Key', 'value'),
-        'Top\n\nSome: footer\nKey: value')
-
-    self.assertEqual(
-        git_footers.add_footer('Top\n\nSome: footer', 'Key', 'value',
-                               after_keys=['Any']),
-        'Top\n\nSome: footer\nKey: value')
-
-    self.assertEqual(
-        git_footers.add_footer('Top\n\nSome: footer', 'Key', 'value',
-                               after_keys=['Some']),
-        'Top\n\nSome: footer\nKey: value')
-
-    self.assertEqual(
-         git_footers.add_footer('Top\n\nSome: footer\nOther: footer',
-                                'Key', 'value', after_keys=['Some']),
-         'Top\n\nSome: footer\nKey: value\nOther: footer')
-
-    self.assertEqual(
-         git_footers.add_footer('Top\n\nSome: footer\nOther: footer',
-                                'Key', 'value', before_keys=['Other']),
-         'Top\n\nSome: footer\nKey: value\nOther: footer')
-
-    self.assertEqual(
-        git_footers.add_footer(
-              'Top\n\nSome: footer\nOther: footer\nFinal: footer',
-              'Key', 'value', after_keys=['Some'], before_keys=['Final']),
-        'Top\n\nSome: footer\nKey: value\nOther: footer\nFinal: footer')
-
-    self.assertEqual(
-        git_footers.add_footer(
-              'Top\n\nSome: footer\nOther: footer\nFinal: footer',
-              'Key', 'value', after_keys=['Other'], before_keys=['Some']),
-        'Top\n\nSome: footer\nOther: footer\nKey: value\nFinal: footer')
-
-  def testRemoveFooter(self):
-    self.assertEqual(
-        git_footers.remove_footer('message', 'Key'),
-        'message')
-
-    self.assertEqual(
-        git_footers.remove_footer('message\n\nSome: footer', 'Key'),
-        'message\n\nSome: footer')
-
-    self.assertEqual(
-        git_footers.remove_footer('message\n\nSome: footer\nKey: value', 'Key'),
-        'message\n\nSome: footer')
-
-    self.assertEqual(
-        git_footers.remove_footer(
-            'message\n\nKey: value\nSome: footer\nKey: value', 'Key'),
-        'message\n\nSome: footer')
-
-
-  @mock.patch('sys.stdout', StringIO())
-  @mock.patch(
-      'sys.stdin',
-      StringIO('line\r\notherline\r\n\r\n\r\nFoo: baz\r\nStill: footer'))
-  def testReadStdin(self):
-    self.assertEqual(git_footers.main([]), 0)
-    self.assertEqual(sys.stdout.getvalue(), 'Still: footer\nFoo: baz\n')
-
-  @mock.patch(
-      'sys.stdin',
-      StringIO('line\r\nany spaces\r\n\r\n\r\nFoo: 1\nBar: 2\nFoo: 3'))
-  def testToJson(self):
-    with gclient_utils.temporary_file() as tmp:
-      self.assertEqual(git_footers.main(['--json', tmp]), 0)
-      with open(tmp) as f:
-        js = json.load(f)
-    self.assertEqual(js, {'Foo': ['3', '1'], 'Bar': ['2']})
+    _position = 'refs/heads/main@{#292272}'
+
+    _position_footer = 'Cr-Commit-Position: %s\n' % _position
+
+    def testFootersBasic(self):
+        self.assertEqual(git_footers.split_footers('Not-A: footer'),
+                         (['Not-A: footer'], [], []))
+        self.assertEqual(
+            git_footers.split_footers('Header\n\nActual: footer'),
+            (['Header', ''], ['Actual: footer'], [('Actual', 'footer')]))
+        self.assertEqual(git_footers.split_footers('\nActual: footer'),
+                         ([''], ['Actual: footer'], [('Actual', 'footer')]))
+        self.assertEqual(
+            git_footers.split_footers('H\n\nBug:\nAlso: footer'),
+            (['H', ''], ['Bug:', 'Also: footer'], [('Bug', ''),
+                                                   ('Also', 'footer')]))
+        self.assertEqual(git_footers.split_footers('H\n\nBug:      '),
+                         (['H', ''], ['Bug:'], [('Bug', '')]))
+        self.assertEqual(git_footers.split_footers('H\n\nBug: 1234     '),
+                         (['H', ''], ['Bug: 1234'], [('Bug', '1234')]))
+        self.assertEqual(
+            git_footers.split_footers('H\n\nBug: 1234\nChange-Id: Ib4321  '),
+            (['H', ''], ['Bug: 1234', 'Change-Id: Ib4321'
+                         ], [('Bug', '1234'), ('Change-Id', 'Ib4321')]))
+
+        self.assertEqual(git_footers.parse_footers(self._message), {})
+        self.assertEqual(
+            git_footers.parse_footers(self._message + self._position_footer),
+            {'Cr-Commit-Position': [self._position]})
+        self.assertEqual(
+            git_footers.parse_footers(self._message + self._position_footer +
+                                      self._position_footer),
+            {'Cr-Commit-Position': [self._position, self._position]})
+        self.assertEqual(
+            git_footers.parse_footers(self._message + 'Bug:\n' +
+                                      self._position_footer),
+            {
+                'Bug': [''],
+                'Cr-Commit-Position': [self._position]
+            })
+
+    def testSkippingBadFooterLines(self):
+        message = ('Title.\n'
+                   '\n'
+                   'Last: paragraph starts\n'
+                   'It-may: contain\n'
+                   'bad lines, which should be skipped\n'
+                   'For: example\n'
+                   '(cherry picked from)\n'
+                   'And-only-valid: footers taken')
+        self.assertEqual(git_footers.split_footers(message), (['Title.', ''], [
+            'Last: paragraph starts', 'It-may: contain',
+            'bad lines, which should be skipped', 'For: example',
+            '(cherry picked from)', 'And-only-valid: footers taken'
+        ], [('Last', 'paragraph starts'), ('It-may', 'contain'),
+            ('For', 'example'), ('And-only-valid', 'footers taken')]))
+        self.assertEqual(
+            git_footers.parse_footers(message), {
+                'Last': ['paragraph starts'],
+                'It-May': ['contain'],
+                'For': ['example'],
+                'And-Only-Valid': ['footers taken']
+            })
+
+    def testAvoidingURLs(self):
+        message = ('Someone accidentally put a URL in the footers.\n'
+                   '\n'
+                   'Followed: by\n'
+                   'http://domain.tld\n'
+                   'Some: footers')
+        self.assertEqual(
+            git_footers.split_footers(message),
+            (['Someone accidentally put a URL in the footers.', ''], [
+                'Followed: by', 'http://domain.tld', 'Some: footers'
+            ], [('Followed', 'by'), ('Some', 'footers')]))
+        self.assertEqual(git_footers.parse_footers(message), {
+            'Followed': ['by'],
+            'Some': ['footers']
+        })
+
+    def testSplittingLastParagraph(self):
+        message = ('Title.\n'
+                   '\n'
+                   'The final paragraph has some normal text first.\n'
+                   'Followed: by\n'
+                   'nonsense trailers and\n'
+                   'Some: footers')
+        self.assertEqual(git_footers.split_footers(message), ([
+            'Title.', '', 'The final paragraph has some normal text first.', ''
+        ], ['Followed: by', 'nonsense trailers and', 'Some: footers'
+            ], [('Followed', 'by'), ('Some', 'footers')]))
+        self.assertEqual(git_footers.parse_footers(message), {
+            'Followed': ['by'],
+            'Some': ['footers']
+        })
+
+    def testGetFooterChangeId(self):
+        msg = '\n'.join([
+            'whatever',
+            '',
+            'Change-Id: ignored',
+            '',  # Above is ignored because of this empty line.
+            'Change-Id: Ideadbeaf'
+        ])
+        self.assertEqual(['Ideadbeaf'], git_footers.get_footer_change_id(msg))
+        self.assertEqual([],
+                         git_footers.get_footer_change_id(
+                             'desc\nBUG=not-a-valid-footer\nChange-Id: Ixxx'))
+        self.assertEqual(['Ixxx'],
+                         git_footers.get_footer_change_id(
+                             'desc\nBUG=not-a-valid-footer\n\nChange-Id: Ixxx'))
+
+    def testAddFooterChangeId(self):
+        with self.assertRaises(AssertionError):
+            git_footers.add_footer_change_id('Already has\n\nChange-Id: Ixxx',
+                                             'Izzz')
+
+        self.assertEqual(
+            git_footers.add_footer_change_id('header-only', 'Ixxx'),
+            'header-only\n\nChange-Id: Ixxx')
+
+        self.assertEqual(
+            git_footers.add_footer_change_id('header\n\nsome: footer', 'Ixxx'),
+            'header\n\nsome: footer\nChange-Id: Ixxx')
+
+        self.assertEqual(
+            git_footers.add_footer_change_id('header\n\nBUG: yy', 'Ixxx'),
+            'header\n\nBUG: yy\nChange-Id: Ixxx')
+
+        self.assertEqual(
+            git_footers.add_footer_change_id('header\n\nBUG: yy\nPos: 1',
+                                             'Ixxx'),
+            'header\n\nBUG: yy\nChange-Id: Ixxx\nPos: 1')
+
+        self.assertEqual(
+            git_footers.add_footer_change_id('header\n\nBUG: yy\n\nPos: 1',
+                                             'Ixxx'),
+            'header\n\nBUG: yy\n\nPos: 1\nChange-Id: Ixxx')
+
+        # Special case: first line is never a footer, even if it looks line one.
+        self.assertEqual(
+            git_footers.add_footer_change_id('header: like footer', 'Ixxx'),
+            'header: like footer\n\nChange-Id: Ixxx')
+
+        self.assertEqual(
+            git_footers.add_footer_change_id('Header.\n\nBug: v8\nN=t\nT=z',
+                                             'Ix'),
+            'Header.\n\nBug: v8\nChange-Id: Ix\nN=t\nT=z')
+
+    def testAddFooter(self):
+        with self.assertRaises(ValueError):
+            git_footers.add_footer('', 'Invalid Footer', 'Value')
+
+        self.assertEqual(git_footers.add_footer('', 'Key', 'Value'),
+                         '\nKey: Value')
+
+        self.assertEqual(
+            git_footers.add_footer('Header with empty line.\n\n', 'Key',
+                                   'Value'),
+            'Header with empty line.\n\nKey: Value')
+
+        self.assertEqual(
+            git_footers.add_footer('Top\n\nSome: footer', 'Key', 'value'),
+            'Top\n\nSome: footer\nKey: value')
+
+        self.assertEqual(
+            git_footers.add_footer('Top\n\nSome: footer',
+                                   'Key',
+                                   'value',
+                                   after_keys=['Any']),
+            'Top\n\nSome: footer\nKey: value')
+
+        self.assertEqual(
+            git_footers.add_footer('Top\n\nSome: footer',
+                                   'Key',
+                                   'value',
+                                   after_keys=['Some']),
+            'Top\n\nSome: footer\nKey: value')
+
+        self.assertEqual(
+            git_footers.add_footer('Top\n\nSome: footer\nOther: footer',
+                                   'Key',
+                                   'value',
+                                   after_keys=['Some']),
+            'Top\n\nSome: footer\nKey: value\nOther: footer')
+
+        self.assertEqual(
+            git_footers.add_footer('Top\n\nSome: footer\nOther: footer',
+                                   'Key',
+                                   'value',
+                                   before_keys=['Other']),
+            'Top\n\nSome: footer\nKey: value\nOther: footer')
+
+        self.assertEqual(
+            git_footers.add_footer(
+                'Top\n\nSome: footer\nOther: footer\nFinal: footer',
+                'Key',
+                'value',
+                after_keys=['Some'],
+                before_keys=['Final']),
+            'Top\n\nSome: footer\nKey: value\nOther: footer\nFinal: footer')
+
+        self.assertEqual(
+            git_footers.add_footer(
+                'Top\n\nSome: footer\nOther: footer\nFinal: footer',
+                'Key',
+                'value',
+                after_keys=['Other'],
+                before_keys=['Some']),
+            'Top\n\nSome: footer\nOther: footer\nKey: value\nFinal: footer')
+
+    def testRemoveFooter(self):
+        self.assertEqual(git_footers.remove_footer('message', 'Key'), 'message')
+
+        self.assertEqual(
+            git_footers.remove_footer('message\n\nSome: footer', 'Key'),
+            'message\n\nSome: footer')
+
+        self.assertEqual(
+            git_footers.remove_footer('message\n\nSome: footer\nKey: value',
+                                      'Key'), 'message\n\nSome: footer')
+
+        self.assertEqual(
+            git_footers.remove_footer(
+                'message\n\nKey: value\nSome: footer\nKey: value', 'Key'),
+            'message\n\nSome: footer')
+
+    @mock.patch('sys.stdout', StringIO())
+    @mock.patch(
+        'sys.stdin',
+        StringIO('line\r\notherline\r\n\r\n\r\nFoo: baz\r\nStill: footer'))
+    def testReadStdin(self):
+        self.assertEqual(git_footers.main([]), 0)
+        self.assertEqual(sys.stdout.getvalue(), 'Still: footer\nFoo: baz\n')
+
+    @mock.patch('sys.stdin',
+                StringIO('line\r\nany spaces\r\n\r\n\r\nFoo: 1\nBar: 2\nFoo: 3')
+                )
+    def testToJson(self):
+        with gclient_utils.temporary_file() as tmp:
+            self.assertEqual(git_footers.main(['--json', tmp]), 0)
+            with open(tmp) as f:
+                js = json.load(f)
+        self.assertEqual(js, {'Foo': ['3', '1'], 'Bar': ['2']})
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 647 - 544
tests/git_hyper_blame_test.py

@@ -22,27 +22,30 @@ import gclient_utils
 
 GitRepo = git_test_utils.GitRepo
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class GitHyperBlameTestBase(git_test_utils.GitRepoReadOnlyTestBase):
-  @classmethod
-  def setUpClass(cls):
-    super(GitHyperBlameTestBase, cls).setUpClass()
-    import git_hyper_blame
-    cls.git_hyper_blame = git_hyper_blame
-
-  def setUp(self):
-    mock.patch('sys.stderr', StringIO()).start()
-    self.addCleanup(mock.patch.stopall)
-
-  def run_hyperblame(self, ignored, filename, revision):
-    outbuf = BytesIO()
-    ignored = [self.repo[c] for c in ignored]
-    retval = self.repo.run(
-        self.git_hyper_blame.hyper_blame, outbuf, ignored, filename, revision)
-    return retval, outbuf.getvalue().rstrip().split(b'\n')
-
-  def blame_line(self, commit_name, rest, author=None, filename=None):
-    """Generate a blame line from a commit.
+    @classmethod
+    def setUpClass(cls):
+        super(GitHyperBlameTestBase, cls).setUpClass()
+        import git_hyper_blame
+        cls.git_hyper_blame = git_hyper_blame
+
+    def setUp(self):
+        mock.patch('sys.stderr', StringIO()).start()
+        self.addCleanup(mock.patch.stopall)
+
+    def run_hyperblame(self, ignored, filename, revision):
+        outbuf = BytesIO()
+        ignored = [self.repo[c] for c in ignored]
+        retval = self.repo.run(self.git_hyper_blame.hyper_blame, outbuf,
+                               ignored, filename, revision)
+        return retval, outbuf.getvalue().rstrip().split(b'\n')
+
+    def blame_line(self, commit_name, rest, author=None, filename=None):
+        """Generate a blame line from a commit.
 
     Args:
       commit_name: The commit's schema name.
@@ -50,567 +53,667 @@ class GitHyperBlameTestBase(git_test_utils.GitRepoReadOnlyTestBase):
       author: The author's name. If omitted, reads the name out of the commit.
       filename: The filename. If omitted, not shown in the blame line.
     """
-    short = self.repo[commit_name][:8]
-    start = '%s %s' % (short, filename) if filename else short
-    if author is None:
-      author = self.repo.show_commit(commit_name, format_string='%an %ai')
-    else:
-      author += self.repo.show_commit(commit_name, format_string=' %ai')
-    return ('%s (%s %s' % (start, author, rest)).encode('utf-8')
+        short = self.repo[commit_name][:8]
+        start = '%s %s' % (short, filename) if filename else short
+        if author is None:
+            author = self.repo.show_commit(commit_name, format_string='%an %ai')
+        else:
+            author += self.repo.show_commit(commit_name, format_string=' %ai')
+        return ('%s (%s %s' % (start, author, rest)).encode('utf-8')
+
 
 class GitHyperBlameMainTest(GitHyperBlameTestBase):
-  """End-to-end tests on a very simple repo."""
-  REPO_SCHEMA = "A B C D"
-
-  COMMIT_A = {
-    'some/files/file': {'data': b'line 1\nline 2\n'},
-  }
-
-  COMMIT_B = {
-    'some/files/file': {'data': b'line 1\nline 2.1\n'},
-  }
-
-  COMMIT_C = {
-    'some/files/file': {'data': b'line 1.1\nline 2.1\n'},
-  }
-
-  COMMIT_D = {
-    # This file should be automatically considered for ignore.
-    '.git-blame-ignore-revs': {'data': b'tag_C'},
-    # This file should not be considered.
-    'some/files/.git-blame-ignore-revs': {'data': b'tag_B'},
-  }
-
-  def setUp(self):
-    super(GitHyperBlameMainTest, self).setUp()
-    # Most tests want to check out C (so the .git-blame-ignore-revs is not
-    # used).
-    self.repo.git('checkout', '-f', 'tag_C')
-
-  def testBasicBlame(self):
-    """Tests the main function (simple end-to-end test with no ignores)."""
-    expected_output = [self.blame_line('C', '1) line 1.1'),
-                       self.blame_line('B', '2) line 2.1')]
-    outbuf = BytesIO()
-    retval = self.repo.run(
-        self.git_hyper_blame.main, ['tag_C', 'some/files/file'], outbuf)
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, outbuf.getvalue().rstrip().split(b'\n'))
-    self.assertEqual('', sys.stderr.getvalue())
-
-  def testIgnoreSimple(self):
-    """Tests the main function (simple end-to-end test with ignores)."""
-    expected_output = [self.blame_line('C', ' 1) line 1.1'),
-                       self.blame_line('A', '2*) line 2.1')]
-    outbuf = BytesIO()
-    retval = self.repo.run(
-        self.git_hyper_blame.main, ['-i', 'tag_B', 'tag_C', 'some/files/file'],
-        outbuf)
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, outbuf.getvalue().rstrip().split(b'\n'))
-    self.assertEqual('', sys.stderr.getvalue())
-
-  def testBadRepo(self):
-    """Tests the main function (not in a repo)."""
-    # Make a temp dir that has no .git directory.
-    curdir = os.getcwd()
-    tempdir = tempfile.mkdtemp(suffix='_nogit', prefix='git_repo')
-    try:
-      os.chdir(tempdir)
-      outbuf = BytesIO()
-      retval = self.git_hyper_blame.main(
-          ['-i', 'tag_B', 'tag_C', 'some/files/file'], outbuf)
-    finally:
-      os.chdir(curdir)
-      shutil.rmtree(tempdir)
-
-    self.assertNotEqual(0, retval)
-    self.assertEqual(b'', outbuf.getvalue())
-    r = re.compile('^fatal: Not a git repository', re.I)
-    self.assertRegexpMatches(sys.stderr.getvalue(), r)
-
-  def testBadFilename(self):
-    """Tests the main function (bad filename)."""
-    outbuf = BytesIO()
-    retval = self.repo.run(
-        self.git_hyper_blame.main, ['-i', 'tag_B', 'tag_C', 'some/files/xxxx'],
-        outbuf)
-    self.assertNotEqual(0, retval)
-    self.assertEqual(b'', outbuf.getvalue())
-    # TODO(mgiuca): This test used to test the exact string, but it broke due to
-    # an upstream bug in git-blame. For now, just check the start of the string.
-    # A patch has been sent upstream; when it rolls out we can revert back to
-    # the original test logic.
-    self.assertTrue(
-        sys.stderr.getvalue().startswith(
+    """End-to-end tests on a very simple repo."""
+    REPO_SCHEMA = "A B C D"
+
+    COMMIT_A = {
+        'some/files/file': {
+            'data': b'line 1\nline 2\n'
+        },
+    }
+
+    COMMIT_B = {
+        'some/files/file': {
+            'data': b'line 1\nline 2.1\n'
+        },
+    }
+
+    COMMIT_C = {
+        'some/files/file': {
+            'data': b'line 1.1\nline 2.1\n'
+        },
+    }
+
+    COMMIT_D = {
+        # This file should be automatically considered for ignore.
+        '.git-blame-ignore-revs': {
+            'data': b'tag_C'
+        },
+        # This file should not be considered.
+        'some/files/.git-blame-ignore-revs': {
+            'data': b'tag_B'
+        },
+    }
+
+    def setUp(self):
+        super(GitHyperBlameMainTest, self).setUp()
+        # Most tests want to check out C (so the .git-blame-ignore-revs is not
+        # used).
+        self.repo.git('checkout', '-f', 'tag_C')
+
+    def testBasicBlame(self):
+        """Tests the main function (simple end-to-end test with no ignores)."""
+        expected_output = [
+            self.blame_line('C', '1) line 1.1'),
+            self.blame_line('B', '2) line 2.1')
+        ]
+        outbuf = BytesIO()
+        retval = self.repo.run(self.git_hyper_blame.main,
+                               ['tag_C', 'some/files/file'], outbuf)
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output,
+                         outbuf.getvalue().rstrip().split(b'\n'))
+        self.assertEqual('', sys.stderr.getvalue())
+
+    def testIgnoreSimple(self):
+        """Tests the main function (simple end-to-end test with ignores)."""
+        expected_output = [
+            self.blame_line('C', ' 1) line 1.1'),
+            self.blame_line('A', '2*) line 2.1')
+        ]
+        outbuf = BytesIO()
+        retval = self.repo.run(self.git_hyper_blame.main,
+                               ['-i', 'tag_B', 'tag_C', 'some/files/file'],
+                               outbuf)
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output,
+                         outbuf.getvalue().rstrip().split(b'\n'))
+        self.assertEqual('', sys.stderr.getvalue())
+
+    def testBadRepo(self):
+        """Tests the main function (not in a repo)."""
+        # Make a temp dir that has no .git directory.
+        curdir = os.getcwd()
+        tempdir = tempfile.mkdtemp(suffix='_nogit', prefix='git_repo')
+        try:
+            os.chdir(tempdir)
+            outbuf = BytesIO()
+            retval = self.git_hyper_blame.main(
+                ['-i', 'tag_B', 'tag_C', 'some/files/file'], outbuf)
+        finally:
+            os.chdir(curdir)
+            shutil.rmtree(tempdir)
+
+        self.assertNotEqual(0, retval)
+        self.assertEqual(b'', outbuf.getvalue())
+        r = re.compile('^fatal: Not a git repository', re.I)
+        self.assertRegexpMatches(sys.stderr.getvalue(), r)
+
+    def testBadFilename(self):
+        """Tests the main function (bad filename)."""
+        outbuf = BytesIO()
+        retval = self.repo.run(self.git_hyper_blame.main,
+                               ['-i', 'tag_B', 'tag_C', 'some/files/xxxx'],
+                               outbuf)
+        self.assertNotEqual(0, retval)
+        self.assertEqual(b'', outbuf.getvalue())
+        # TODO(mgiuca): This test used to test the exact string, but it broke
+        # due to an upstream bug in git-blame. For now, just check the start of
+        # the string. A patch has been sent upstream; when it rolls out we can
+        # revert back to the original test logic.
+        self.assertTrue(sys.stderr.getvalue().startswith(
             'fatal: no such path some/files/xxxx in '))
 
-  def testBadRevision(self):
-    """Tests the main function (bad revision to blame from)."""
-    outbuf = BytesIO()
-    retval = self.repo.run(
-        self.git_hyper_blame.main, ['-i', 'tag_B', 'xxxx', 'some/files/file'],
-        outbuf)
-    self.assertNotEqual(0, retval)
-    self.assertEqual(b'', outbuf.getvalue())
-    self.assertRegexpMatches(sys.stderr.getvalue(),
-                             '^fatal: ambiguous argument \'xxxx\': unknown '
-                             'revision or path not in the working tree.')
-
-  def testBadIgnore(self):
-    """Tests the main function (bad revision passed to -i)."""
-    expected_output = [self.blame_line('C', '1) line 1.1'),
-                       self.blame_line('B', '2) line 2.1')]
-    outbuf = BytesIO()
-    retval = self.repo.run(
-        self.git_hyper_blame.main, ['-i', 'xxxx', 'tag_C', 'some/files/file'],
-        outbuf)
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, outbuf.getvalue().rstrip().split(b'\n'))
-    self.assertEqual(
-        'warning: unknown revision \'xxxx\'.\n', sys.stderr.getvalue())
-
-  def testIgnoreFile(self):
-    """Tests passing the ignore list in a file."""
-    expected_output = [self.blame_line('C', ' 1) line 1.1'),
-                       self.blame_line('A', '2*) line 2.1')]
-    outbuf = BytesIO()
-
-    with gclient_utils.temporary_file() as ignore_file:
-      gclient_utils.FileWrite(
-          ignore_file,
-          '# Line comments are allowed.\n'
-          '\n'
-          '{}\n'
-          'xxxx\n'.format(self.repo['B']))
-      retval = self.repo.run(
-          self.git_hyper_blame.main,
-          ['--ignore-file', ignore_file, 'tag_C', 'some/files/file'],
-          outbuf)
-
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, outbuf.getvalue().rstrip().split(b'\n'))
-    self.assertEqual(
-        'warning: unknown revision \'xxxx\'.\n', sys.stderr.getvalue())
-
-  def testDefaultIgnoreFile(self):
-    """Tests automatically using a default ignore list."""
-    # Check out revision D. We expect the script to use the default ignore list
-    # that is checked out, *not* the one committed at the given revision.
-    self.repo.git('checkout', '-f', 'tag_D')
-
-    expected_output = [self.blame_line('A', '1*) line 1.1'),
-                       self.blame_line('B', ' 2) line 2.1')]
-    outbuf = BytesIO()
-
-    retval = self.repo.run(
-        self.git_hyper_blame.main, ['tag_D', 'some/files/file'], outbuf)
-
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, outbuf.getvalue().rstrip().split(b'\n'))
-    self.assertEqual('', sys.stderr.getvalue())
-
-    # Test blame from a different revision. Despite the default ignore file
-    # *not* being committed at that revision, it should still be picked up
-    # because D is currently checked out.
-    outbuf = BytesIO()
-
-    retval = self.repo.run(
-        self.git_hyper_blame.main, ['tag_C', 'some/files/file'], outbuf)
-
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, outbuf.getvalue().rstrip().split(b'\n'))
-    self.assertEqual('', sys.stderr.getvalue())
-
-  def testNoDefaultIgnores(self):
-    """Tests the --no-default-ignores switch."""
-    # Check out revision D. This has a .git-blame-ignore-revs file, which we
-    # expect to be ignored due to --no-default-ignores.
-    self.repo.git('checkout', '-f', 'tag_D')
-
-    expected_output = [self.blame_line('C', '1) line 1.1'),
-                       self.blame_line('B', '2) line 2.1')]
-    outbuf = BytesIO()
-
-    retval = self.repo.run(
-        self.git_hyper_blame.main,
-        ['tag_D', 'some/files/file', '--no-default-ignores'], outbuf)
-
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, outbuf.getvalue().rstrip().split(b'\n'))
-    self.assertEqual('', sys.stderr.getvalue())
+    def testBadRevision(self):
+        """Tests the main function (bad revision to blame from)."""
+        outbuf = BytesIO()
+        retval = self.repo.run(self.git_hyper_blame.main,
+                               ['-i', 'tag_B', 'xxxx', 'some/files/file'],
+                               outbuf)
+        self.assertNotEqual(0, retval)
+        self.assertEqual(b'', outbuf.getvalue())
+        self.assertRegexpMatches(
+            sys.stderr.getvalue(),
+            '^fatal: ambiguous argument \'xxxx\': unknown '
+            'revision or path not in the working tree.')
+
+    def testBadIgnore(self):
+        """Tests the main function (bad revision passed to -i)."""
+        expected_output = [
+            self.blame_line('C', '1) line 1.1'),
+            self.blame_line('B', '2) line 2.1')
+        ]
+        outbuf = BytesIO()
+        retval = self.repo.run(self.git_hyper_blame.main,
+                               ['-i', 'xxxx', 'tag_C', 'some/files/file'],
+                               outbuf)
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output,
+                         outbuf.getvalue().rstrip().split(b'\n'))
+        self.assertEqual('warning: unknown revision \'xxxx\'.\n',
+                         sys.stderr.getvalue())
+
+    def testIgnoreFile(self):
+        """Tests passing the ignore list in a file."""
+        expected_output = [
+            self.blame_line('C', ' 1) line 1.1'),
+            self.blame_line('A', '2*) line 2.1')
+        ]
+        outbuf = BytesIO()
+
+        with gclient_utils.temporary_file() as ignore_file:
+            gclient_utils.FileWrite(
+                ignore_file, '# Line comments are allowed.\n'
+                '\n'
+                '{}\n'
+                'xxxx\n'.format(self.repo['B']))
+            retval = self.repo.run(
+                self.git_hyper_blame.main,
+                ['--ignore-file', ignore_file, 'tag_C', 'some/files/file'],
+                outbuf)
+
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output,
+                         outbuf.getvalue().rstrip().split(b'\n'))
+        self.assertEqual('warning: unknown revision \'xxxx\'.\n',
+                         sys.stderr.getvalue())
+
+    def testDefaultIgnoreFile(self):
+        """Tests automatically using a default ignore list."""
+        # Check out revision D. We expect the script to use the default ignore
+        # list that is checked out, *not* the one committed at the given
+        # revision.
+        self.repo.git('checkout', '-f', 'tag_D')
+
+        expected_output = [
+            self.blame_line('A', '1*) line 1.1'),
+            self.blame_line('B', ' 2) line 2.1')
+        ]
+        outbuf = BytesIO()
+
+        retval = self.repo.run(self.git_hyper_blame.main,
+                               ['tag_D', 'some/files/file'], outbuf)
+
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output,
+                         outbuf.getvalue().rstrip().split(b'\n'))
+        self.assertEqual('', sys.stderr.getvalue())
+
+        # Test blame from a different revision. Despite the default ignore file
+        # *not* being committed at that revision, it should still be picked up
+        # because D is currently checked out.
+        outbuf = BytesIO()
+
+        retval = self.repo.run(self.git_hyper_blame.main,
+                               ['tag_C', 'some/files/file'], outbuf)
+
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output,
+                         outbuf.getvalue().rstrip().split(b'\n'))
+        self.assertEqual('', sys.stderr.getvalue())
+
+    def testNoDefaultIgnores(self):
+        """Tests the --no-default-ignores switch."""
+        # Check out revision D. This has a .git-blame-ignore-revs file, which we
+        # expect to be ignored due to --no-default-ignores.
+        self.repo.git('checkout', '-f', 'tag_D')
+
+        expected_output = [
+            self.blame_line('C', '1) line 1.1'),
+            self.blame_line('B', '2) line 2.1')
+        ]
+        outbuf = BytesIO()
+
+        retval = self.repo.run(
+            self.git_hyper_blame.main,
+            ['tag_D', 'some/files/file', '--no-default-ignores'], outbuf)
+
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output,
+                         outbuf.getvalue().rstrip().split(b'\n'))
+        self.assertEqual('', sys.stderr.getvalue())
+
 
 class GitHyperBlameSimpleTest(GitHyperBlameTestBase):
-  REPO_SCHEMA = """
+    REPO_SCHEMA = """
   A B D E F G H
   A C D
   """
 
-  COMMIT_A = {
-    'some/files/file1': {'data': b'file1'},
-    'some/files/file2': {'data': b'file2'},
-    'some/files/empty': {'data': b''},
-    'some/other/file':  {'data': b'otherfile'},
-  }
-
-  COMMIT_B = {
-    'some/files/file2': {
-      'mode': 0o755,
-      'data': b'file2 - vanilla\n'},
-    'some/files/empty': {'data': b'not anymore'},
-    'some/files/file3': {'data': b'file3'},
-  }
-
-  COMMIT_C = {
-    'some/files/file2': {'data': b'file2 - merged\n'},
-  }
-
-  COMMIT_D = {
-    'some/files/file2': {'data': b'file2 - vanilla\nfile2 - merged\n'},
-  }
-
-  COMMIT_E = {
-    'some/files/file2': {'data': b'file2 - vanilla\nfile_x - merged\n'},
-  }
-
-  COMMIT_F = {
-    'some/files/file2': {'data': b'file2 - vanilla\nfile_y - merged\n'},
-  }
-
-  # Move file2 from files to other.
-  COMMIT_G = {
-    'some/files/file2': {'data': None},
-    'some/other/file2': {'data': b'file2 - vanilla\nfile_y - merged\n'},
-  }
-
-  COMMIT_H = {
-    'some/other/file2': {'data': b'file2 - vanilla\nfile_z - merged\n'},
-  }
-
-  def testBlameError(self):
-    """Tests a blame on a non-existent file."""
-    expected_output = [b'']
-    retval, output = self.run_hyperblame([], 'some/other/file2', 'tag_D')
-    self.assertNotEqual(0, retval)
-    self.assertEqual(expected_output, output)
-
-  def testBlameEmpty(self):
-    """Tests a blame of an empty file with no ignores."""
-    expected_output = [b'']
-    retval, output = self.run_hyperblame([], 'some/files/empty', 'tag_A')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
-
-  def testBasicBlame(self):
-    """Tests a basic blame with no ignores."""
-    # Expect to blame line 1 on B, line 2 on C.
-    expected_output = [self.blame_line('B', '1) file2 - vanilla'),
-                       self.blame_line('C', '2) file2 - merged')]
-    retval, output = self.run_hyperblame([], 'some/files/file2', 'tag_D')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
-
-  def testBlameRenamed(self):
-    """Tests a blame with no ignores on a renamed file."""
-    # Expect to blame line 1 on B, line 2 on H.
-    # Because the file has a different name than it had when (some of) these
-    # lines were changed, expect the filenames to be displayed.
-    expected_output = [self.blame_line('B', '1) file2 - vanilla',
-                                       filename='some/files/file2'),
-                       self.blame_line('H', '2) file_z - merged',
-                                       filename='some/other/file2')]
-    retval, output = self.run_hyperblame([], 'some/other/file2', 'tag_H')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
-
-  def testIgnoreSimpleEdits(self):
-    """Tests a blame with simple (line-level changes) commits ignored."""
-    # Expect to blame line 1 on B, line 2 on E.
-    expected_output = [self.blame_line('B', '1) file2 - vanilla'),
-                       self.blame_line('E', '2) file_x - merged')]
-    retval, output = self.run_hyperblame([], 'some/files/file2', 'tag_E')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
-
-    # Ignore E; blame line 1 on B, line 2 on C.
-    expected_output = [self.blame_line('B', ' 1) file2 - vanilla'),
-                       self.blame_line('C', '2*) file_x - merged')]
-    retval, output = self.run_hyperblame(['E'], 'some/files/file2', 'tag_E')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
-
-    # Ignore E and F; blame line 1 on B, line 2 on C.
-    expected_output = [self.blame_line('B', ' 1) file2 - vanilla'),
-                       self.blame_line('C', '2*) file_y - merged')]
-    retval, output = self.run_hyperblame(['E', 'F'], 'some/files/file2',
-                                         'tag_F')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
-
-  def testIgnoreInitialCommit(self):
-    """Tests a blame with the initial commit ignored."""
-    # Ignore A. Expect A to get blamed anyway.
-    expected_output = [self.blame_line('A', '1) file1')]
-    retval, output = self.run_hyperblame(['A'], 'some/files/file1', 'tag_A')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
-
-  def testIgnoreFileAdd(self):
-    """Tests a blame ignoring the commit that added this file."""
-    # Ignore A. Expect A to get blamed anyway.
-    expected_output = [self.blame_line('B', '1) file3')]
-    retval, output = self.run_hyperblame(['B'], 'some/files/file3', 'tag_B')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
-
-  def testIgnoreFilePopulate(self):
-    """Tests a blame ignoring the commit that added data to an empty file."""
-    # Ignore A. Expect A to get blamed anyway.
-    expected_output = [self.blame_line('B', '1) not anymore')]
-    retval, output = self.run_hyperblame(['B'], 'some/files/empty', 'tag_B')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
+    COMMIT_A = {
+        'some/files/file1': {
+            'data': b'file1'
+        },
+        'some/files/file2': {
+            'data': b'file2'
+        },
+        'some/files/empty': {
+            'data': b''
+        },
+        'some/other/file': {
+            'data': b'otherfile'
+        },
+    }
+
+    COMMIT_B = {
+        'some/files/file2': {
+            'mode': 0o755,
+            'data': b'file2 - vanilla\n'
+        },
+        'some/files/empty': {
+            'data': b'not anymore'
+        },
+        'some/files/file3': {
+            'data': b'file3'
+        },
+    }
+
+    COMMIT_C = {
+        'some/files/file2': {
+            'data': b'file2 - merged\n'
+        },
+    }
+
+    COMMIT_D = {
+        'some/files/file2': {
+            'data': b'file2 - vanilla\nfile2 - merged\n'
+        },
+    }
+
+    COMMIT_E = {
+        'some/files/file2': {
+            'data': b'file2 - vanilla\nfile_x - merged\n'
+        },
+    }
+
+    COMMIT_F = {
+        'some/files/file2': {
+            'data': b'file2 - vanilla\nfile_y - merged\n'
+        },
+    }
+
+    # Move file2 from files to other.
+    COMMIT_G = {
+        'some/files/file2': {
+            'data': None
+        },
+        'some/other/file2': {
+            'data': b'file2 - vanilla\nfile_y - merged\n'
+        },
+    }
+
+    COMMIT_H = {
+        'some/other/file2': {
+            'data': b'file2 - vanilla\nfile_z - merged\n'
+        },
+    }
+
+    def testBlameError(self):
+        """Tests a blame on a non-existent file."""
+        expected_output = [b'']
+        retval, output = self.run_hyperblame([], 'some/other/file2', 'tag_D')
+        self.assertNotEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
+    def testBlameEmpty(self):
+        """Tests a blame of an empty file with no ignores."""
+        expected_output = [b'']
+        retval, output = self.run_hyperblame([], 'some/files/empty', 'tag_A')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
+    def testBasicBlame(self):
+        """Tests a basic blame with no ignores."""
+        # Expect to blame line 1 on B, line 2 on C.
+        expected_output = [
+            self.blame_line('B', '1) file2 - vanilla'),
+            self.blame_line('C', '2) file2 - merged')
+        ]
+        retval, output = self.run_hyperblame([], 'some/files/file2', 'tag_D')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
+    def testBlameRenamed(self):
+        """Tests a blame with no ignores on a renamed file."""
+        # Expect to blame line 1 on B, line 2 on H.
+        # Because the file has a different name than it had when (some of) these
+        # lines were changed, expect the filenames to be displayed.
+        expected_output = [
+            self.blame_line('B',
+                            '1) file2 - vanilla',
+                            filename='some/files/file2'),
+            self.blame_line('H',
+                            '2) file_z - merged',
+                            filename='some/other/file2')
+        ]
+        retval, output = self.run_hyperblame([], 'some/other/file2', 'tag_H')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
+    def testIgnoreSimpleEdits(self):
+        """Tests a blame with simple (line-level changes) commits ignored."""
+        # Expect to blame line 1 on B, line 2 on E.
+        expected_output = [
+            self.blame_line('B', '1) file2 - vanilla'),
+            self.blame_line('E', '2) file_x - merged')
+        ]
+        retval, output = self.run_hyperblame([], 'some/files/file2', 'tag_E')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
+        # Ignore E; blame line 1 on B, line 2 on C.
+        expected_output = [
+            self.blame_line('B', ' 1) file2 - vanilla'),
+            self.blame_line('C', '2*) file_x - merged')
+        ]
+        retval, output = self.run_hyperblame(['E'], 'some/files/file2', 'tag_E')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
+        # Ignore E and F; blame line 1 on B, line 2 on C.
+        expected_output = [
+            self.blame_line('B', ' 1) file2 - vanilla'),
+            self.blame_line('C', '2*) file_y - merged')
+        ]
+        retval, output = self.run_hyperblame(['E', 'F'], 'some/files/file2',
+                                             'tag_F')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
+    def testIgnoreInitialCommit(self):
+        """Tests a blame with the initial commit ignored."""
+        # Ignore A. Expect A to get blamed anyway.
+        expected_output = [self.blame_line('A', '1) file1')]
+        retval, output = self.run_hyperblame(['A'], 'some/files/file1', 'tag_A')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
+    def testIgnoreFileAdd(self):
+        """Tests a blame ignoring the commit that added this file."""
+        # Ignore A. Expect A to get blamed anyway.
+        expected_output = [self.blame_line('B', '1) file3')]
+        retval, output = self.run_hyperblame(['B'], 'some/files/file3', 'tag_B')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
+    def testIgnoreFilePopulate(self):
+        """Tests a blame ignoring the commit that added data to an empty file."""
+        # Ignore A. Expect A to get blamed anyway.
+        expected_output = [self.blame_line('B', '1) not anymore')]
+        retval, output = self.run_hyperblame(['B'], 'some/files/empty', 'tag_B')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
 
 class GitHyperBlameLineMotionTest(GitHyperBlameTestBase):
-  REPO_SCHEMA = """
+    REPO_SCHEMA = """
   A B C D E F
   """
 
-  COMMIT_A = {
-    'file':  {'data': b'A\ngreen\nblue\n'},
-  }
-
-  # Change "green" to "yellow".
-  COMMIT_B = {
-    'file': {'data': b'A\nyellow\nblue\n'},
-  }
-
-  # Insert 2 lines at the top,
-  # Change "yellow" to "red".
-  # Insert 1 line at the bottom.
-  COMMIT_C = {
-    'file': {'data': b'X\nY\nA\nred\nblue\nZ\n'},
-  }
-
-  # Insert 2 more lines at the top.
-  COMMIT_D = {
-    'file': {'data': b'earth\nfire\nX\nY\nA\nred\nblue\nZ\n'},
-  }
-
-  # Insert a line before "red", and indent "red" and "blue".
-  COMMIT_E = {
-    'file': {'data': b'earth\nfire\nX\nY\nA\ncolors:\n red\n blue\nZ\n'},
-  }
-
-  # Insert a line between "A" and "colors".
-  COMMIT_F = {
-    'file': {'data': b'earth\nfire\nX\nY\nA\nB\ncolors:\n red\n blue\nZ\n'},
-  }
-
-  def testCacheDiffHunks(self):
-    """Tests the cache_diff_hunks internal function."""
-    expected_hunks = [((0, 0), (1, 2)),
-                      ((2, 1), (4, 1)),
-                      ((3, 0), (6, 1)),
-                      ]
-    hunks = self.repo.run(self.git_hyper_blame.cache_diff_hunks, 'tag_B',
-                          'tag_C')
-    self.assertEqual(expected_hunks, hunks)
-
-  def testApproxLinenoAcrossRevs(self):
-    """Tests the approx_lineno_across_revs internal function."""
-    # Note: For all of these tests, the "old revision" and "new revision" are
-    # reversed, which matches the usage by hyper_blame.
-
-    # Test an unchanged line before any hunks in the diff. Should be unchanged.
-    lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
-                           'file', 'file', 'tag_B', 'tag_A', 1)
-    self.assertEqual(1, lineno)
-
-    # Test an unchanged line after all hunks in the diff. Should be matched to
-    # the line's previous position in the file.
-    lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
-                           'file', 'file', 'tag_D', 'tag_C', 6)
-    self.assertEqual(4, lineno)
-
-    # Test a line added in a new hunk. Should be matched to the line *before*
-    # where the hunk was inserted in the old version of the file.
-    lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
-                           'file', 'file', 'tag_F', 'tag_E', 6)
-    self.assertEqual(5, lineno)
-
-    # Test lines added in a new hunk at the very start of the file. This tests
-    # an edge case: normally it would be matched to the line *before* where the
-    # hunk was inserted (Line 0), but since the hunk is at the start of the
-    # file, we match to Line 1.
-    lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
-                           'file', 'file', 'tag_C', 'tag_B', 1)
-    self.assertEqual(1, lineno)
-    lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
-                           'file', 'file', 'tag_C', 'tag_B', 2)
-    self.assertEqual(1, lineno)
-
-    # Test an unchanged line in between hunks in the diff. Should be matched to
-    # the line's previous position in the file.
-    lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
-                           'file', 'file', 'tag_C', 'tag_B', 3)
-    self.assertEqual(1, lineno)
-
-    # Test a changed line. Should be matched to the hunk's previous position in
-    # the file.
-    lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
-                           'file', 'file', 'tag_C', 'tag_B', 4)
-    self.assertEqual(2, lineno)
-
-    # Test a line added in a new hunk at the very end of the file. Should be
-    # matched to the line *before* where the hunk was inserted (the last line of
-    # the file). Technically same as the case above but good to boundary test.
-    lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
-                           'file', 'file', 'tag_C', 'tag_B', 6)
-    self.assertEqual(3, lineno)
-
-  def testInterHunkLineMotion(self):
-    """Tests a blame with line motion in another hunk in the ignored commit."""
-    # Blame from D, ignoring C.
-
-    # Lines 1, 2 were added by D.
-    # Lines 3, 4 were added by C (but ignored, so blame A).
-    # Line 5 was added by A.
-    # Line 6 was modified by C (but ignored, so blame B). (Note: This requires
-    # the algorithm to figure out that Line 6 in D == Line 4 in C ~= Line 2 in
-    # B, so it blames B. Otherwise, it would blame A.)
-    # Line 7 was added by A.
-    # Line 8 was added by C (but ignored, so blame A).
-    expected_output = [self.blame_line('D', ' 1) earth'),
-                       self.blame_line('D', ' 2) fire'),
-                       self.blame_line('A', '3*) X'),
-                       self.blame_line('A', '4*) Y'),
-                       self.blame_line('A', ' 5) A'),
-                       self.blame_line('B', '6*) red'),
-                       self.blame_line('A', ' 7) blue'),
-                       self.blame_line('A', '8*) Z'),
-                       ]
-    retval, output = self.run_hyperblame(['C'], 'file', 'tag_D')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
-
-  def testIntraHunkLineMotion(self):
-    """Tests a blame with line motion in the same hunk in the ignored commit."""
-    # This test was mostly written as a demonstration of the limitations of the
-    # current algorithm (it exhibits non-ideal behaviour).
-
-    # Blame from E, ignoring E.
-    # Line 6 was added by E (but ignored, so blame C).
-    # Lines 7, 8 were modified by E (but ignored, so blame A).
-    # TODO(mgiuca): Ideally, this would blame Line 7 on C, because the line
-    # "red" was added by C, and this is just a small change to that line. But
-    # the current algorithm can't deal with line motion within a hunk, so it
-    # just assumes Line 7 in E ~= Line 7 in D == Line 3 in A (which was "blue").
-    expected_output = [self.blame_line('D', ' 1) earth'),
-                       self.blame_line('D', ' 2) fire'),
-                       self.blame_line('C', ' 3) X'),
-                       self.blame_line('C', ' 4) Y'),
-                       self.blame_line('A', ' 5) A'),
-                       self.blame_line('C', '6*) colors:'),
-                       self.blame_line('A', '7*)  red'),
-                       self.blame_line('A', '8*)  blue'),
-                       self.blame_line('C', ' 9) Z'),
-                       ]
-    retval, output = self.run_hyperblame(['E'], 'file', 'tag_E')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
+    COMMIT_A = {
+        'file': {
+            'data': b'A\ngreen\nblue\n'
+        },
+    }
+
+    # Change "green" to "yellow".
+    COMMIT_B = {
+        'file': {
+            'data': b'A\nyellow\nblue\n'
+        },
+    }
+
+    # Insert 2 lines at the top,
+    # Change "yellow" to "red".
+    # Insert 1 line at the bottom.
+    COMMIT_C = {
+        'file': {
+            'data': b'X\nY\nA\nred\nblue\nZ\n'
+        },
+    }
+
+    # Insert 2 more lines at the top.
+    COMMIT_D = {
+        'file': {
+            'data': b'earth\nfire\nX\nY\nA\nred\nblue\nZ\n'
+        },
+    }
+
+    # Insert a line before "red", and indent "red" and "blue".
+    COMMIT_E = {
+        'file': {
+            'data': b'earth\nfire\nX\nY\nA\ncolors:\n red\n blue\nZ\n'
+        },
+    }
+
+    # Insert a line between "A" and "colors".
+    COMMIT_F = {
+        'file': {
+            'data': b'earth\nfire\nX\nY\nA\nB\ncolors:\n red\n blue\nZ\n'
+        },
+    }
+
+    def testCacheDiffHunks(self):
+        """Tests the cache_diff_hunks internal function."""
+        expected_hunks = [
+            ((0, 0), (1, 2)),
+            ((2, 1), (4, 1)),
+            ((3, 0), (6, 1)),
+        ]
+        hunks = self.repo.run(self.git_hyper_blame.cache_diff_hunks, 'tag_B',
+                              'tag_C')
+        self.assertEqual(expected_hunks, hunks)
+
+    def testApproxLinenoAcrossRevs(self):
+        """Tests the approx_lineno_across_revs internal function."""
+        # Note: For all of these tests, the "old revision" and "new revision"
+        # are reversed, which matches the usage by hyper_blame.
+
+        # Test an unchanged line before any hunks in the diff. Should be
+        # unchanged.
+        lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
+                               'file', 'file', 'tag_B', 'tag_A', 1)
+        self.assertEqual(1, lineno)
+
+        # Test an unchanged line after all hunks in the diff. Should be matched
+        # to the line's previous position in the file.
+        lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
+                               'file', 'file', 'tag_D', 'tag_C', 6)
+        self.assertEqual(4, lineno)
+
+        # Test a line added in a new hunk. Should be matched to the line
+        # *before* where the hunk was inserted in the old version of the file.
+        lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
+                               'file', 'file', 'tag_F', 'tag_E', 6)
+        self.assertEqual(5, lineno)
+
+        # Test lines added in a new hunk at the very start of the file. This
+        # tests an edge case: normally it would be matched to the line *before*
+        # where the hunk was inserted (Line 0), but since the hunk is at the
+        # start of the file, we match to Line 1.
+        lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
+                               'file', 'file', 'tag_C', 'tag_B', 1)
+        self.assertEqual(1, lineno)
+        lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
+                               'file', 'file', 'tag_C', 'tag_B', 2)
+        self.assertEqual(1, lineno)
+
+        # Test an unchanged line in between hunks in the diff. Should be matched
+        # to the line's previous position in the file.
+        lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
+                               'file', 'file', 'tag_C', 'tag_B', 3)
+        self.assertEqual(1, lineno)
+
+        # Test a changed line. Should be matched to the hunk's previous position
+        # in the file.
+        lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
+                               'file', 'file', 'tag_C', 'tag_B', 4)
+        self.assertEqual(2, lineno)
+
+        # Test a line added in a new hunk at the very end of the file. Should be
+        # matched to the line *before* where the hunk was inserted (the last
+        # line of the file). Technically same as the case above but good to
+        # boundary test.
+        lineno = self.repo.run(self.git_hyper_blame.approx_lineno_across_revs,
+                               'file', 'file', 'tag_C', 'tag_B', 6)
+        self.assertEqual(3, lineno)
+
+    def testInterHunkLineMotion(self):
+        """Tests a blame with line motion in another hunk in the ignored commit."""
+        # Blame from D, ignoring C.
+
+        # Lines 1, 2 were added by D.
+        # Lines 3, 4 were added by C (but ignored, so blame A).
+        # Line 5 was added by A.
+        # Line 6 was modified by C (but ignored, so blame B). (Note: This
+        # requires the algorithm to figure out that Line 6 in D == Line 4 in C
+        # ~= Line 2 in B, so it blames B. Otherwise, it would blame A.) Line 7
+        # was added by A. Line 8 was added by C (but ignored, so blame A).
+        expected_output = [
+            self.blame_line('D', ' 1) earth'),
+            self.blame_line('D', ' 2) fire'),
+            self.blame_line('A', '3*) X'),
+            self.blame_line('A', '4*) Y'),
+            self.blame_line('A', ' 5) A'),
+            self.blame_line('B', '6*) red'),
+            self.blame_line('A', ' 7) blue'),
+            self.blame_line('A', '8*) Z'),
+        ]
+        retval, output = self.run_hyperblame(['C'], 'file', 'tag_D')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
+
+    def testIntraHunkLineMotion(self):
+        """Tests a blame with line motion in the same hunk in the ignored commit."""
+        # This test was mostly written as a demonstration of the limitations of
+        # the current algorithm (it exhibits non-ideal behaviour).
+
+        # Blame from E, ignoring E.
+        # Line 6 was added by E (but ignored, so blame C).
+        # Lines 7, 8 were modified by E (but ignored, so blame A).
+        # TODO(mgiuca): Ideally, this would blame Line 7 on C, because the line
+        # "red" was added by C, and this is just a small change to that line.
+        # But the current algorithm can't deal with line motion within a hunk,
+        # so it just assumes Line 7 in E ~= Line 7 in D == Line 3 in A (which
+        # was "blue").
+        expected_output = [
+            self.blame_line('D', ' 1) earth'),
+            self.blame_line('D', ' 2) fire'),
+            self.blame_line('C', ' 3) X'),
+            self.blame_line('C', ' 4) Y'),
+            self.blame_line('A', ' 5) A'),
+            self.blame_line('C', '6*) colors:'),
+            self.blame_line('A', '7*)  red'),
+            self.blame_line('A', '8*)  blue'),
+            self.blame_line('C', ' 9) Z'),
+        ]
+        retval, output = self.run_hyperblame(['E'], 'file', 'tag_E')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
 
 
 class GitHyperBlameLineNumberTest(GitHyperBlameTestBase):
-  REPO_SCHEMA = """
+    REPO_SCHEMA = """
   A B C D
   """
 
-  COMMIT_A = {
-    'file':  {'data': b'red\nblue\n'},
-  }
-
-  # Change "blue" to "green".
-  COMMIT_B = {
-    'file': {'data': b'red\ngreen\n'},
-  }
-
-  # Insert 2 lines at the top,
-  COMMIT_C = {
-    'file': {'data': b'\n\nred\ngreen\n'},
-  }
-
-  # Change "green" to "yellow".
-  COMMIT_D = {
-    'file': {'data': b'\n\nred\nyellow\n'},
-  }
-
-  def testTwoChangesWithAddedLines(self):
-    """Regression test for https://crbug.com/709831.
+    COMMIT_A = {
+        'file': {
+            'data': b'red\nblue\n'
+        },
+    }
+
+    # Change "blue" to "green".
+    COMMIT_B = {
+        'file': {
+            'data': b'red\ngreen\n'
+        },
+    }
+
+    # Insert 2 lines at the top,
+    COMMIT_C = {
+        'file': {
+            'data': b'\n\nred\ngreen\n'
+        },
+    }
+
+    # Change "green" to "yellow".
+    COMMIT_D = {
+        'file': {
+            'data': b'\n\nred\nyellow\n'
+        },
+    }
+
+    def testTwoChangesWithAddedLines(self):
+        """Regression test for https://crbug.com/709831.
 
     Tests a line with multiple ignored edits, and a line number change in
     between (such that the line number in the current revision is bigger than
     the file's line count at the older ignored revision).
     """
-    expected_output = [self.blame_line('C', ' 1) '),
-                       self.blame_line('C', ' 2) '),
-                       self.blame_line('A', ' 3) red'),
-                       self.blame_line('A', '4*) yellow'),
-                       ]
-    # Due to https://crbug.com/709831, ignoring both B and D would crash,
-    # because of C (in between those revisions) which moves Line 2 to Line 4.
-    # The algorithm would incorrectly think that Line 4 was still on Line 4 in
-    # Commit B, even though it was Line 2 at that time. Its index is out of
-    # range in the number of lines in Commit B.
-    retval, output = self.run_hyperblame(['B', 'D'], 'file', 'tag_D')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
+        expected_output = [
+            self.blame_line('C', ' 1) '),
+            self.blame_line('C', ' 2) '),
+            self.blame_line('A', ' 3) red'),
+            self.blame_line('A', '4*) yellow'),
+        ]
+        # Due to https://crbug.com/709831, ignoring both B and D would crash,
+        # because of C (in between those revisions) which moves Line 2 to Line
+        # 4. The algorithm would incorrectly think that Line 4 was still on Line
+        # 4 in Commit B, even though it was Line 2 at that time. Its index is
+        # out of range in the number of lines in Commit B.
+        retval, output = self.run_hyperblame(['B', 'D'], 'file', 'tag_D')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
 
 
 class GitHyperBlameUnicodeTest(GitHyperBlameTestBase):
-  REPO_SCHEMA = """
+    REPO_SCHEMA = """
   A B C
   """
 
-  COMMIT_A = {
-    GitRepo.AUTHOR_NAME: 'ASCII Author',
-    'file':  {'data': b'red\nblue\n'},
-  }
-
-  # Add a line.
-  COMMIT_B = {
-      GitRepo.AUTHOR_NAME: '\u4e2d\u56fd\u4f5c\u8005',
-      'file': {
-          'data': b'red\ngreen\nblue\n'
-      },
-  }
-
-  # Modify a line with non-UTF-8 author and file text.
-  COMMIT_C = {
-      GitRepo.AUTHOR_NAME: 'Lat\xedn-1 Author',
-      'file': {
-          'data': 'red\ngre\u00e9n\nblue\n'.encode('latin-1')
-      },
-  }
-
-  def testNonUTF8Data(self):
-    """Ensures correct behaviour even if author or file data is not UTF-8.
+    COMMIT_A = {
+        GitRepo.AUTHOR_NAME: 'ASCII Author',
+        'file': {
+            'data': b'red\nblue\n'
+        },
+    }
+
+    # Add a line.
+    COMMIT_B = {
+        GitRepo.AUTHOR_NAME: '\u4e2d\u56fd\u4f5c\u8005',
+        'file': {
+            'data': b'red\ngreen\nblue\n'
+        },
+    }
+
+    # Modify a line with non-UTF-8 author and file text.
+    COMMIT_C = {
+        GitRepo.AUTHOR_NAME: 'Lat\xedn-1 Author',
+        'file': {
+            'data': 'red\ngre\u00e9n\nblue\n'.encode('latin-1')
+        },
+    }
+
+    def testNonUTF8Data(self):
+        """Ensures correct behaviour even if author or file data is not UTF-8.
 
     There is no guarantee that a file will be UTF-8-encoded, so this is
     realistic.
     """
-    expected_output = [
-        self.blame_line('A', '1) red', author='ASCII Author  '),
-        # The Author has been re-encoded as UTF-8. The file data is converted to
-        # UTF8 and unknown characters replaced.
-        self.blame_line('C', '2) gre\ufffdn', author='Lat\xedn-1 Author'),
-        self.blame_line('A', '3) blue', author='ASCII Author  '),
-    ]
-    retval, output = self.run_hyperblame([], 'file', 'tag_C')
-    self.assertEqual(0, retval)
-    self.assertEqual(expected_output, output)
+        expected_output = [
+            self.blame_line('A', '1) red', author='ASCII Author  '),
+            # The Author has been re-encoded as UTF-8. The file data is
+            # converted to UTF8 and unknown characters replaced.
+            self.blame_line('C', '2) gre\ufffdn', author='Lat\xedn-1 Author'),
+            self.blame_line('A', '3) blue', author='ASCII Author  '),
+        ]
+        retval, output = self.run_hyperblame([], 'file', 'tag_C')
+        self.assertEqual(0, retval)
+        self.assertEqual(expected_output, output)
 
 
 if __name__ == '__main__':
-  sys.exit(coverage_utils.covered_main(
-    os.path.join(DEPOT_TOOLS_ROOT, 'git_hyper_blame.py')))
+    sys.exit(
+        coverage_utils.covered_main(
+            os.path.join(DEPOT_TOOLS_ROOT, 'git_hyper_blame.py')))

+ 78 - 75
tests/git_map_test.py

@@ -20,98 +20,101 @@ from testing_support import git_test_utils
 import git_map
 import git_common
 
-
 git_common.TEST_MODE = True
 GitRepo = git_test_utils.GitRepo
 
+
 class GitMapTest(git_test_utils.GitRepoReadOnlyTestBase):
-  REPO_SCHEMA = """"
+    REPO_SCHEMA = """"
   A B C D 😋 F G
     B H I J K
           J L
   """
 
-  def setUp(self):
-    # Include branch_K, branch_L to make sure that ABCDEFG all get the
-    # same commit hashes as self.repo. Otherwise they get committed with the
-    # wrong timestamps, due to commit ordering.
-    # TODO(iannucci): Make commit timestamps deterministic in left to right, top
-    #                 to bottom order, not in lexi-topographical order.
-    origin_schema = git_test_utils.GitRepoSchema("""
+    def setUp(self):
+        # Include branch_K, branch_L to make sure that ABCDEFG all get the
+        # same commit hashes as self.repo. Otherwise they get committed with the
+        # wrong timestamps, due to commit ordering.
+        # TODO(iannucci): Make commit timestamps deterministic in left to right,
+        # top to bottom order, not in lexi-topographical order.
+        origin_schema = git_test_utils.GitRepoSchema(
+            """
     A B C D 😋 F G M N O
       B H I J K
             J L
     """, self.getRepoContent)
-    self.origin = origin_schema.reify()
-    self.origin.git('checkout', 'main')
-    self.origin.git('branch', '-d', *['branch_'+l for l in 'KLG'])
+        self.origin = origin_schema.reify()
+        self.origin.git('checkout', 'main')
+        self.origin.git('branch', '-d', *['branch_' + l for l in 'KLG'])
 
-    self.repo.git('remote', 'add', 'origin', self.origin.repo_path)
-    self.repo.git('config', '--add', 'remote.origin.fetch',
-                  '+refs/tags/*:refs/tags/*')
-    self.repo.git('update-ref', 'refs/remotes/origin/main', 'tag_E')
-    self.repo.git('branch', '--set-upstream-to', 'branch_G', 'branch_K')
-    self.repo.git('branch', '--set-upstream-to', 'branch_K', 'branch_L')
+        self.repo.git('remote', 'add', 'origin', self.origin.repo_path)
+        self.repo.git('config', '--add', 'remote.origin.fetch',
+                      '+refs/tags/*:refs/tags/*')
+        self.repo.git('update-ref', 'refs/remotes/origin/main', 'tag_E')
+        self.repo.git('branch', '--set-upstream-to', 'branch_G', 'branch_K')
+        self.repo.git('branch', '--set-upstream-to', 'branch_K', 'branch_L')
 
-    self.repo.git('fetch', 'origin')
-    mock.patch('git_map.RESET', '').start()
-    mock.patch('git_map.BLUE_BACK', '').start()
-    mock.patch('git_map.BRIGHT_RED', '').start()
-    mock.patch('git_map.CYAN', '').start()
-    mock.patch('git_map.GREEN', '').start()
-    mock.patch('git_map.MAGENTA', '').start()
-    mock.patch('git_map.RED', '').start()
-    mock.patch('git_map.WHITE', '').start()
-    mock.patch('git_map.YELLOW', '').start()
-    self.addCleanup(mock.patch.stopall)
+        self.repo.git('fetch', 'origin')
+        mock.patch('git_map.RESET', '').start()
+        mock.patch('git_map.BLUE_BACK', '').start()
+        mock.patch('git_map.BRIGHT_RED', '').start()
+        mock.patch('git_map.CYAN', '').start()
+        mock.patch('git_map.GREEN', '').start()
+        mock.patch('git_map.MAGENTA', '').start()
+        mock.patch('git_map.RED', '').start()
+        mock.patch('git_map.WHITE', '').start()
+        mock.patch('git_map.YELLOW', '').start()
+        self.addCleanup(mock.patch.stopall)
 
-  def testHelp(self):
-    outbuf = io.BytesIO()
-    self.repo.run(git_map.main, ['-h'], outbuf)
-    self.assertIn(b'usage: git map [-h] [--help] [<args>]', outbuf.getvalue())
+    def testHelp(self):
+        outbuf = io.BytesIO()
+        self.repo.run(git_map.main, ['-h'], outbuf)
+        self.assertIn(b'usage: git map [-h] [--help] [<args>]',
+                      outbuf.getvalue())
 
-  def testGitMap(self):
-    expected = os.linesep.join([
-        '* 6e85e877ea	(tag_O, origin/main, origin/branch_O) 1970-01-30 ~ O',
-        '* 4705470871	(tag_N) 1970-01-28 ~ N',
-        '* 8761b1a94f	(tag_M) 1970-01-26 ~ M',
-        '* 5e7ce08691	(tag_G) 1970-01-24 ~ G',
-        '* 78543ed411	(tag_F) 1970-01-18 ~ F',
-        '* f5c2b77013	(tag_😋) 1970-01-16 ~ 😋',
-        '* 5249c43079	(tag_D) 1970-01-10 ~ D',
-        '* 072ade676a	(tag_C) 1970-01-06 ~ C',
-        '| * e77da937d5	(branch_G) 1970-01-26 ~ G',
-        '| * acda9677fd	1970-01-20 ~ F',
-        '| * b4bed3c8e1	1970-01-18 ~ 😋',
-        '| * 5da071fda9	1970-01-12 ~ D',
-        '| * 1ef9b2e4ca	1970-01-08 ~ C',
-        '| | * ddd611f619	(branch_L) 1970-01-24 ~ L',
-        '| | | * f07cbd8cfc	(branch_K) 1970-01-22 ~ K',
-        '| | |/  ',
-        '| | * fb7da24708	1970-01-16 ~ J    <(branch_L)',
-        '| | * bb168f6d65	1970-01-14 ~ I',
-        '| | * ee1032effa	1970-01-10 ~ H',
-        '| |/  ',
-        '| * db57edd2c0	1970-01-06 ~ B    <(branch_K)',
-        '| * e4f775f844	(root_A) 1970-01-04 ~ A',
-        '| * 2824d6d8b6	(tag_L, origin/branch_L) 1970-01-22 ~ L',
-        '| | * 4e599306f0	(tag_K, origin/branch_K) 1970-01-20 ~ K',
-        '| |/  ',
-        '| * 332f1b4499	(tag_J) 1970-01-14 ~ J',
-        '| * 2fc0bc5ee5	(tag_I) 1970-01-12 ~ I',
-        '| * 6e0ab26451	(tag_H) 1970-01-08 ~ H',
-        '|/  ',
-        '* 315457dbe8	(tag_B) 1970-01-04 ~ B',
-        '* cd589e62d8	(tag_A, origin/root_A) 1970-01-02 ~ A',
-        '* 7026d3d68e	(tag_", root_", main, branch_") 1970-01-02 ~ "',
-    ])
-    outbuf = io.BytesIO()
-    self.repo.run(git_map.main, [], outbuf)
-    output = outbuf.getvalue()
-    output = re.sub(br'.\[\d\dm', b'', output)
-    output = re.sub(br'.\[m', b'', output)
-    self.assertEqual(output.splitlines(), expected.encode('utf-8').splitlines())
+    def testGitMap(self):
+        expected = os.linesep.join([
+            '* 6e85e877ea	(tag_O, origin/main, origin/branch_O) 1970-01-30 ~ O',
+            '* 4705470871	(tag_N) 1970-01-28 ~ N',
+            '* 8761b1a94f	(tag_M) 1970-01-26 ~ M',
+            '* 5e7ce08691	(tag_G) 1970-01-24 ~ G',
+            '* 78543ed411	(tag_F) 1970-01-18 ~ F',
+            '* f5c2b77013	(tag_😋) 1970-01-16 ~ 😋',
+            '* 5249c43079	(tag_D) 1970-01-10 ~ D',
+            '* 072ade676a	(tag_C) 1970-01-06 ~ C',
+            '| * e77da937d5	(branch_G) 1970-01-26 ~ G',
+            '| * acda9677fd	1970-01-20 ~ F',
+            '| * b4bed3c8e1	1970-01-18 ~ 😋',
+            '| * 5da071fda9	1970-01-12 ~ D',
+            '| * 1ef9b2e4ca	1970-01-08 ~ C',
+            '| | * ddd611f619	(branch_L) 1970-01-24 ~ L',
+            '| | | * f07cbd8cfc	(branch_K) 1970-01-22 ~ K',
+            '| | |/  ',
+            '| | * fb7da24708	1970-01-16 ~ J    <(branch_L)',
+            '| | * bb168f6d65	1970-01-14 ~ I',
+            '| | * ee1032effa	1970-01-10 ~ H',
+            '| |/  ',
+            '| * db57edd2c0	1970-01-06 ~ B    <(branch_K)',
+            '| * e4f775f844	(root_A) 1970-01-04 ~ A',
+            '| * 2824d6d8b6	(tag_L, origin/branch_L) 1970-01-22 ~ L',
+            '| | * 4e599306f0	(tag_K, origin/branch_K) 1970-01-20 ~ K',
+            '| |/  ',
+            '| * 332f1b4499	(tag_J) 1970-01-14 ~ J',
+            '| * 2fc0bc5ee5	(tag_I) 1970-01-12 ~ I',
+            '| * 6e0ab26451	(tag_H) 1970-01-08 ~ H',
+            '|/  ',
+            '* 315457dbe8	(tag_B) 1970-01-04 ~ B',
+            '* cd589e62d8	(tag_A, origin/root_A) 1970-01-02 ~ A',
+            '* 7026d3d68e	(tag_", root_", main, branch_") 1970-01-02 ~ "',
+        ])
+        outbuf = io.BytesIO()
+        self.repo.run(git_map.main, [], outbuf)
+        output = outbuf.getvalue()
+        output = re.sub(br'.\[\d\dm', b'', output)
+        output = re.sub(br'.\[m', b'', output)
+        self.assertEqual(output.splitlines(),
+                         expected.encode('utf-8').splitlines())
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 77 - 76
tests/git_migrate_default_branch_test.py

@@ -16,82 +16,83 @@ import git_migrate_default_branch
 
 
 class CMDFormatTestCase(unittest.TestCase):
-  def setUp(self):
-    self.addCleanup(mock.patch.stopall)
-
-  def test_no_remote(self):
-    def RunMock(*args):
-      if args[0] == 'remote':
-        return ''
-      self.fail('did not expect such run git command: %s' % args[0])
-
-    mock.patch('git_migrate_default_branch.git_common.run', RunMock).start()
-    with self.assertRaisesRegexp(RuntimeError, 'Could not find any remote'):
-      git_migrate_default_branch.main()
-
-  def test_migration_not_ready(self):
-    def RunMock(*args):
-      if args[0] == 'remote':
-        return 'origin\ngerrit'
-      raise Exception('Did not expect such run git command: %s' % args[0])
-
-    mock.patch('git_migrate_default_branch.git_common.run', RunMock).start()
-    mock.patch('git_migrate_default_branch.git_common.repo_root',
-               return_value='.').start()
-    mock.patch('git_migrate_default_branch.scm.GIT.GetConfig',
-               return_value='https://chromium.googlesource.com').start()
-    mock.patch('git_migrate_default_branch.gerrit_util.GetProjectHead',
-               return_value=None).start()
-    with self.assertRaisesRegexp(RuntimeError, 'not migrated yet'):
-      git_migrate_default_branch.main()
-
-  def test_migration_no_master(self):
-    def RunMock(*args):
-      if args[0] == 'remote':
-        return 'origin\ngerrit'
-
-      if args[0] == 'fetch':
-        return
-
-      if args[0] == 'branch':
-        return
-
-      if args[0] == 'config':
-        return
-      raise Exception('Did not expect such run git command: %s' % args[0])
-
-    mock_runs = mock.patch('git_migrate_default_branch.git_common.run',
-                           side_effect=RunMock).start()
-    mock.patch('git_migrate_default_branch.git_common.repo_root',
-               return_value='.').start()
-    mock.patch('git_migrate_default_branch.scm.GIT.GetConfig',
-               return_value='https://chromium.googlesource.com').start()
-    mock.patch('git_migrate_default_branch.gerrit_util.GetProjectHead',
-               return_value='refs/heads/main').start()
-
-    BranchesInfo = collections.namedtuple('BranchesInfo',
-                                          'hash upstream commits behind')
-    branches = {
-        '': None,  # always returned
-        'master': BranchesInfo('0000', 'origin/master', '0', '0'),
-        'feature': BranchesInfo('0000', 'master', '0', '0'),
-        'another_feature': BranchesInfo('0000', 'feature', '0', '0'),
-        'remote_feature': BranchesInfo('0000', 'origin/master', '0', '0'),
-    }
-    mock.patch('git_migrate_default_branch.git_common.get_branches_info',
-               return_value=branches).start()
-    mock_merge_base = mock.patch(
-        'git_migrate_default_branch.git_common.remove_merge_base',
-        return_value=branches).start()
-
-    git_migrate_default_branch.main()
-    mock_merge_base.assert_any_call('feature')
-    mock_merge_base.assert_any_call('remote_feature')
-    mock_runs.assert_any_call('branch', '-m', 'master', 'main')
-    mock_runs.assert_any_call('branch', '--set-upstream-to', 'main', 'feature')
-    mock_runs.assert_any_call('branch', '--set-upstream-to', 'origin/main',
-                              'remote_feature')
+    def setUp(self):
+        self.addCleanup(mock.patch.stopall)
+
+    def test_no_remote(self):
+        def RunMock(*args):
+            if args[0] == 'remote':
+                return ''
+            self.fail('did not expect such run git command: %s' % args[0])
+
+        mock.patch('git_migrate_default_branch.git_common.run', RunMock).start()
+        with self.assertRaisesRegexp(RuntimeError, 'Could not find any remote'):
+            git_migrate_default_branch.main()
+
+    def test_migration_not_ready(self):
+        def RunMock(*args):
+            if args[0] == 'remote':
+                return 'origin\ngerrit'
+            raise Exception('Did not expect such run git command: %s' % args[0])
+
+        mock.patch('git_migrate_default_branch.git_common.run', RunMock).start()
+        mock.patch('git_migrate_default_branch.git_common.repo_root',
+                   return_value='.').start()
+        mock.patch('git_migrate_default_branch.scm.GIT.GetConfig',
+                   return_value='https://chromium.googlesource.com').start()
+        mock.patch('git_migrate_default_branch.gerrit_util.GetProjectHead',
+                   return_value=None).start()
+        with self.assertRaisesRegexp(RuntimeError, 'not migrated yet'):
+            git_migrate_default_branch.main()
+
+    def test_migration_no_master(self):
+        def RunMock(*args):
+            if args[0] == 'remote':
+                return 'origin\ngerrit'
+
+            if args[0] == 'fetch':
+                return
+
+            if args[0] == 'branch':
+                return
+
+            if args[0] == 'config':
+                return
+            raise Exception('Did not expect such run git command: %s' % args[0])
+
+        mock_runs = mock.patch('git_migrate_default_branch.git_common.run',
+                               side_effect=RunMock).start()
+        mock.patch('git_migrate_default_branch.git_common.repo_root',
+                   return_value='.').start()
+        mock.patch('git_migrate_default_branch.scm.GIT.GetConfig',
+                   return_value='https://chromium.googlesource.com').start()
+        mock.patch('git_migrate_default_branch.gerrit_util.GetProjectHead',
+                   return_value='refs/heads/main').start()
+
+        BranchesInfo = collections.namedtuple('BranchesInfo',
+                                              'hash upstream commits behind')
+        branches = {
+            '': None,  # always returned
+            'master': BranchesInfo('0000', 'origin/master', '0', '0'),
+            'feature': BranchesInfo('0000', 'master', '0', '0'),
+            'another_feature': BranchesInfo('0000', 'feature', '0', '0'),
+            'remote_feature': BranchesInfo('0000', 'origin/master', '0', '0'),
+        }
+        mock.patch('git_migrate_default_branch.git_common.get_branches_info',
+                   return_value=branches).start()
+        mock_merge_base = mock.patch(
+            'git_migrate_default_branch.git_common.remove_merge_base',
+            return_value=branches).start()
+
+        git_migrate_default_branch.main()
+        mock_merge_base.assert_any_call('feature')
+        mock_merge_base.assert_any_call('remote_feature')
+        mock_runs.assert_any_call('branch', '-m', 'master', 'main')
+        mock_runs.assert_any_call('branch', '--set-upstream-to', 'main',
+                                  'feature')
+        mock_runs.assert_any_call('branch', '--set-upstream-to', 'origin/main',
+                                  'remote_feature')
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 52 - 54
tests/git_number_test.py

@@ -2,7 +2,6 @@
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for git_number.py"""
 
 import binascii
@@ -17,70 +16,69 @@ from testing_support import coverage_utils
 
 
 class Basic(git_test_utils.GitRepoReadWriteTestBase):
-  REPO_SCHEMA = """
+    REPO_SCHEMA = """
   A B C D E
     B   F E
   X Y     E
   """
 
-  @classmethod
-  def setUpClass(cls):
-    super(Basic, cls).setUpClass()
-    import git_number
-    cls.gn = git_number
-    cls.old_POOL_KIND = cls.gn.POOL_KIND
-    cls.gn.POOL_KIND = 'threads'
+    @classmethod
+    def setUpClass(cls):
+        super(Basic, cls).setUpClass()
+        import git_number
+        cls.gn = git_number
+        cls.old_POOL_KIND = cls.gn.POOL_KIND
+        cls.gn.POOL_KIND = 'threads'
 
-  @classmethod
-  def tearDownClass(cls):
-    cls.gn.POOL_KIND = cls.old_POOL_KIND
-    super(Basic, cls).tearDownClass()
+    @classmethod
+    def tearDownClass(cls):
+        cls.gn.POOL_KIND = cls.old_POOL_KIND
+        super(Basic, cls).tearDownClass()
 
-  def tearDown(self):
-    self.gn.clear_caches()
-    super(Basic, self).tearDown()
+    def tearDown(self):
+        self.gn.clear_caches()
+        super(Basic, self).tearDown()
 
-  def _git_number(self, refs, cache=False):
-    refs = [binascii.unhexlify(ref) for ref in refs]
-    self.repo.run(self.gn.load_generation_numbers, refs)
-    if cache:
-      self.repo.run(self.gn.finalize, refs)
-    return [self.gn.get_num(ref) for ref in refs]
+    def _git_number(self, refs, cache=False):
+        refs = [binascii.unhexlify(ref) for ref in refs]
+        self.repo.run(self.gn.load_generation_numbers, refs)
+        if cache:
+            self.repo.run(self.gn.finalize, refs)
+        return [self.gn.get_num(ref) for ref in refs]
 
-  def testBasic(self):
-    self.assertEqual([0], self._git_number([self.repo['A']]))
-    self.assertEqual([2], self._git_number([self.repo['F']]))
-    self.assertEqual([0], self._git_number([self.repo['X']]))
-    self.assertEqual([4], self._git_number([self.repo['E']]))
+    def testBasic(self):
+        self.assertEqual([0], self._git_number([self.repo['A']]))
+        self.assertEqual([2], self._git_number([self.repo['F']]))
+        self.assertEqual([0], self._git_number([self.repo['X']]))
+        self.assertEqual([4], self._git_number([self.repo['E']]))
 
-  def testInProcessCache(self):
-    self.assertEqual(
-        None,
-        self.repo.run(self.gn.get_num, binascii.unhexlify(self.repo['A'])))
-    self.assertEqual([4], self._git_number([self.repo['E']]))
-    self.assertEqual(
-        0,
-        self.repo.run(self.gn.get_num, binascii.unhexlify(self.repo['A'])))
+    def testInProcessCache(self):
+        self.assertEqual(
+            None,
+            self.repo.run(self.gn.get_num, binascii.unhexlify(self.repo['A'])))
+        self.assertEqual([4], self._git_number([self.repo['E']]))
+        self.assertEqual(
+            0, self.repo.run(self.gn.get_num,
+                             binascii.unhexlify(self.repo['A'])))
 
-  def testOnDiskCache(self):
-    self.assertEqual(
-        None,
-        self.repo.run(self.gn.get_num, binascii.unhexlify(self.repo['A'])))
-    self.assertEqual([4], self._git_number([self.repo['E']], cache=True))
-    self.assertEqual([4], self._git_number([self.repo['E']], cache=True))
-    self.gn.clear_caches()
-    self.assertEqual(
-        0,
-        self.repo.run(self.gn.get_num, binascii.unhexlify(self.repo['A'])))
-    self.gn.clear_caches()
-    self.repo.run(self.gn.clear_caches, True)
-    self.assertEqual(
-        None,
-        self.repo.run(self.gn.get_num, binascii.unhexlify(self.repo['A'])))
+    def testOnDiskCache(self):
+        self.assertEqual(
+            None,
+            self.repo.run(self.gn.get_num, binascii.unhexlify(self.repo['A'])))
+        self.assertEqual([4], self._git_number([self.repo['E']], cache=True))
+        self.assertEqual([4], self._git_number([self.repo['E']], cache=True))
+        self.gn.clear_caches()
+        self.assertEqual(
+            0, self.repo.run(self.gn.get_num,
+                             binascii.unhexlify(self.repo['A'])))
+        self.gn.clear_caches()
+        self.repo.run(self.gn.clear_caches, True)
+        self.assertEqual(
+            None,
+            self.repo.run(self.gn.get_num, binascii.unhexlify(self.repo['A'])))
 
 
 if __name__ == '__main__':
-  sys.exit(coverage_utils.covered_main(
-    os.path.join(DEPOT_TOOLS_ROOT, 'git_number.py'),
-    '3.7'
-  ))
+    sys.exit(
+        coverage_utils.covered_main(
+            os.path.join(DEPOT_TOOLS_ROOT, 'git_number.py'), '3.7'))

+ 324 - 312
tests/git_rebase_update_test.py

@@ -2,7 +2,6 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for git_rebase_update.py"""
 
 import os
@@ -14,391 +13,404 @@ sys.path.insert(0, DEPOT_TOOLS_ROOT)
 from testing_support import coverage_utils
 from testing_support import git_test_utils
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
+
 class GitRebaseUpdateTest(git_test_utils.GitRepoReadWriteTestBase):
-  REPO_SCHEMA = """
+    REPO_SCHEMA = """
   A B C D E F G
     B H I J K
           J L
   """
 
-  @classmethod
-  def getRepoContent(cls, commit):
-    # Every commit X gets a file X with the content X
-    return {commit: {'data': commit.encode('utf-8')}}
-
-  @classmethod
-  def setUpClass(cls):
-    super(GitRebaseUpdateTest, cls).setUpClass()
-    import git_rebase_update, git_new_branch, git_reparent_branch, git_common
-    import git_rename_branch
-    cls.reup = git_rebase_update
-    cls.rp = git_reparent_branch
-    cls.nb = git_new_branch
-    cls.mv = git_rename_branch
-    cls.gc = git_common
-    cls.gc.TEST_MODE = True
-
-  def setUp(self):
-    super(GitRebaseUpdateTest, self).setUp()
-    # Include branch_K, branch_L to make sure that ABCDEFG all get the
-    # same commit hashes as self.repo. Otherwise they get committed with the
-    # wrong timestamps, due to commit ordering.
-    # TODO(iannucci): Make commit timestamps deterministic in left to right, top
-    #                 to bottom order, not in lexi-topographical order.
-    origin_schema = git_test_utils.GitRepoSchema("""
+    @classmethod
+    def getRepoContent(cls, commit):
+        # Every commit X gets a file X with the content X
+        return {commit: {'data': commit.encode('utf-8')}}
+
+    @classmethod
+    def setUpClass(cls):
+        super(GitRebaseUpdateTest, cls).setUpClass()
+        import git_rebase_update, git_new_branch, git_reparent_branch, git_common
+        import git_rename_branch
+        cls.reup = git_rebase_update
+        cls.rp = git_reparent_branch
+        cls.nb = git_new_branch
+        cls.mv = git_rename_branch
+        cls.gc = git_common
+        cls.gc.TEST_MODE = True
+
+    def setUp(self):
+        super(GitRebaseUpdateTest, self).setUp()
+        # Include branch_K, branch_L to make sure that ABCDEFG all get the
+        # same commit hashes as self.repo. Otherwise they get committed with the
+        # wrong timestamps, due to commit ordering.
+        # TODO(iannucci): Make commit timestamps deterministic in left to right,
+        # top to bottom order, not in lexi-topographical order.
+        origin_schema = git_test_utils.GitRepoSchema(
+            """
     A B C D E F G M N O
       B H I J K
             J L
     """, self.getRepoContent)
-    self.origin = origin_schema.reify()
-    self.origin.git('checkout', 'main')
-    self.origin.git('branch', '-d', *['branch_'+l for l in 'KLG'])
-
-    self.repo.git('remote', 'add', 'origin', self.origin.repo_path)
-    self.repo.git('config', '--add', 'remote.origin.fetch',
-                  '+refs/tags/*:refs/tags/*')
-    self.repo.git('update-ref', 'refs/remotes/origin/main', 'tag_E')
-    self.repo.git('branch', '--set-upstream-to', 'branch_G', 'branch_K')
-    self.repo.git('branch', '--set-upstream-to', 'branch_K', 'branch_L')
-    self.repo.git('branch', '--set-upstream-to', 'origin/main', 'branch_G')
-
-    self.repo.to_schema_refs += ['origin/main']
-
-  def tearDown(self):
-    self.origin.nuke()
-    super(GitRebaseUpdateTest, self).tearDown()
-
-  def testRebaseUpdate(self):
-    self.repo.git('checkout', 'branch_K')
-
-    self.repo.run(self.nb.main, ['foobar'])
-    self.assertEqual(self.repo.git('rev-parse', 'HEAD').stdout,
-                     self.repo.git('rev-parse', 'origin/main').stdout)
-
-    with self.repo.open('foobar', 'w') as f:
-      f.write('this is the foobar file')
-    self.repo.git('add', 'foobar')
-    self.repo.git_commit('foobar1')
-
-    with self.repo.open('foobar', 'w') as f:
-      f.write('totes the Foobar file')
-    self.repo.git_commit('foobar2')
-
-    self.repo.run(self.nb.main, ['--upstream-current', 'int1_foobar'])
-    self.repo.run(self.nb.main, ['--upstream-current', 'int2_foobar'])
-    self.repo.run(self.nb.main, ['--upstream-current', 'sub_foobar'])
-    with self.repo.open('foobar', 'w') as f:
-        f.write('some more foobaring')
-    self.repo.git('add', 'foobar')
-    self.repo.git_commit('foobar3')
-
-    self.repo.git('checkout', 'branch_K')
-    self.repo.run(self.nb.main, ['--upstream-current', 'sub_K'])
-    with self.repo.open('K', 'w') as f:
-      f.write('This depends on K')
-    self.repo.git_commit('sub_K')
-
-    self.repo.run(self.nb.main, ['old_branch'])
-    self.repo.git('reset', '--hard', self.repo['A'])
-    with self.repo.open('old_file', 'w') as f:
-      f.write('old_files we want to keep around')
-    self.repo.git('add', 'old_file')
-    self.repo.git_commit('old_file')
-    self.repo.git('config', 'branch.old_branch.dormant', 'true')
-
-    self.repo.git('checkout', 'origin/main')
-
-    self.assertSchema("""
+        self.origin = origin_schema.reify()
+        self.origin.git('checkout', 'main')
+        self.origin.git('branch', '-d', *['branch_' + l for l in 'KLG'])
+
+        self.repo.git('remote', 'add', 'origin', self.origin.repo_path)
+        self.repo.git('config', '--add', 'remote.origin.fetch',
+                      '+refs/tags/*:refs/tags/*')
+        self.repo.git('update-ref', 'refs/remotes/origin/main', 'tag_E')
+        self.repo.git('branch', '--set-upstream-to', 'branch_G', 'branch_K')
+        self.repo.git('branch', '--set-upstream-to', 'branch_K', 'branch_L')
+        self.repo.git('branch', '--set-upstream-to', 'origin/main', 'branch_G')
+
+        self.repo.to_schema_refs += ['origin/main']
+
+    def tearDown(self):
+        self.origin.nuke()
+        super(GitRebaseUpdateTest, self).tearDown()
+
+    def testRebaseUpdate(self):
+        self.repo.git('checkout', 'branch_K')
+
+        self.repo.run(self.nb.main, ['foobar'])
+        self.assertEqual(
+            self.repo.git('rev-parse', 'HEAD').stdout,
+            self.repo.git('rev-parse', 'origin/main').stdout)
+
+        with self.repo.open('foobar', 'w') as f:
+            f.write('this is the foobar file')
+        self.repo.git('add', 'foobar')
+        self.repo.git_commit('foobar1')
+
+        with self.repo.open('foobar', 'w') as f:
+            f.write('totes the Foobar file')
+        self.repo.git_commit('foobar2')
+
+        self.repo.run(self.nb.main, ['--upstream-current', 'int1_foobar'])
+        self.repo.run(self.nb.main, ['--upstream-current', 'int2_foobar'])
+        self.repo.run(self.nb.main, ['--upstream-current', 'sub_foobar'])
+        with self.repo.open('foobar', 'w') as f:
+            f.write('some more foobaring')
+        self.repo.git('add', 'foobar')
+        self.repo.git_commit('foobar3')
+
+        self.repo.git('checkout', 'branch_K')
+        self.repo.run(self.nb.main, ['--upstream-current', 'sub_K'])
+        with self.repo.open('K', 'w') as f:
+            f.write('This depends on K')
+        self.repo.git_commit('sub_K')
+
+        self.repo.run(self.nb.main, ['old_branch'])
+        self.repo.git('reset', '--hard', self.repo['A'])
+        with self.repo.open('old_file', 'w') as f:
+            f.write('old_files we want to keep around')
+        self.repo.git('add', 'old_file')
+        self.repo.git_commit('old_file')
+        self.repo.git('config', 'branch.old_branch.dormant', 'true')
+
+        self.repo.git('checkout', 'origin/main')
+
+        self.assertSchema("""
     A B H I J K sub_K
             J L
       B C D E foobar1 foobar2 foobar3
             E F G
     A old_file
     """)
-    self.assertEqual(self.repo['A'], self.origin['A'])
-    self.assertEqual(self.repo['E'], self.origin['E'])
+        self.assertEqual(self.repo['A'], self.origin['A'])
+        self.assertEqual(self.repo['E'], self.origin['E'])
 
-    with self.repo.open('bob', 'wb') as f:
-      f.write(b'testing auto-freeze/thaw')
+        with self.repo.open('bob', 'wb') as f:
+            f.write(b'testing auto-freeze/thaw')
 
-    output, _ = self.repo.capture_stdio(self.reup.main)
-    self.assertIn('Cannot rebase-update', output)
+        output, _ = self.repo.capture_stdio(self.reup.main)
+        self.assertIn('Cannot rebase-update', output)
 
-    self.repo.run(self.nb.main, ['empty_branch'])
-    self.repo.run(self.nb.main, ['--upstream-current', 'empty_branch2'])
+        self.repo.run(self.nb.main, ['empty_branch'])
+        self.repo.run(self.nb.main, ['--upstream-current', 'empty_branch2'])
 
-    self.repo.git('checkout', 'branch_K')
+        self.repo.git('checkout', 'branch_K')
 
-    output, _ = self.repo.capture_stdio(self.reup.main)
+        output, _ = self.repo.capture_stdio(self.reup.main)
 
-    self.assertIn('Rebasing: branch_G', output)
-    self.assertIn('Rebasing: branch_K', output)
-    self.assertIn('Rebasing: branch_L', output)
-    self.assertIn('Rebasing: foobar', output)
-    self.assertIn('Rebasing: sub_K', output)
-    self.assertIn('Deleted branch branch_G', output)
-    self.assertIn('Deleted branch empty_branch', output)
-    self.assertIn('Deleted branch empty_branch2', output)
-    self.assertIn('Deleted branch int1_foobar', output)
-    self.assertIn('Deleted branch int2_foobar', output)
-    self.assertIn('Reparented branch_K to track origin/main', output)
-    self.assertIn('Reparented sub_foobar to track foobar', output)
+        self.assertIn('Rebasing: branch_G', output)
+        self.assertIn('Rebasing: branch_K', output)
+        self.assertIn('Rebasing: branch_L', output)
+        self.assertIn('Rebasing: foobar', output)
+        self.assertIn('Rebasing: sub_K', output)
+        self.assertIn('Deleted branch branch_G', output)
+        self.assertIn('Deleted branch empty_branch', output)
+        self.assertIn('Deleted branch empty_branch2', output)
+        self.assertIn('Deleted branch int1_foobar', output)
+        self.assertIn('Deleted branch int2_foobar', output)
+        self.assertIn('Reparented branch_K to track origin/main', output)
+        self.assertIn('Reparented sub_foobar to track foobar', output)
 
-    self.assertSchema("""
+        self.assertSchema("""
     A B C D E F G M N O H I J K sub_K
                               K L
                       O foobar1 foobar2 foobar3
     A old_file
     """)
 
-    output, _ = self.repo.capture_stdio(self.reup.main)
-    self.assertIn('branch_K up-to-date', output)
-    self.assertIn('branch_L up-to-date', output)
-    self.assertIn('foobar up-to-date', output)
-    self.assertIn('sub_K up-to-date', output)
-
-    with self.repo.open('bob') as f:
-      self.assertEqual(b'testing auto-freeze/thaw', f.read())
-
-    self.assertEqual(self.repo.git('status', '--porcelain').stdout, '?? bob\n')
-
-    self.repo.git('checkout', 'origin/main')
-    _, err = self.repo.capture_stdio(self.rp.main, [])
-    self.assertIn('Must specify new parent somehow', err)
-    _, err = self.repo.capture_stdio(self.rp.main, ['foobar'])
-    self.assertIn('Must be on the branch', err)
-
-    self.repo.git('checkout', 'branch_K')
-    _, err = self.repo.capture_stdio(self.rp.main, ['origin/main'])
-    self.assertIn('Cannot reparent a branch to its existing parent', err)
-    output, _ = self.repo.capture_stdio(self.rp.main, ['foobar'])
-    self.assertIn('Rebasing: branch_K', output)
-    self.assertIn('Rebasing: sub_K', output)
-    self.assertIn('Rebasing: branch_L', output)
-
-    self.assertSchema("""
+        output, _ = self.repo.capture_stdio(self.reup.main)
+        self.assertIn('branch_K up-to-date', output)
+        self.assertIn('branch_L up-to-date', output)
+        self.assertIn('foobar up-to-date', output)
+        self.assertIn('sub_K up-to-date', output)
+
+        with self.repo.open('bob') as f:
+            self.assertEqual(b'testing auto-freeze/thaw', f.read())
+
+        self.assertEqual(
+            self.repo.git('status', '--porcelain').stdout, '?? bob\n')
+
+        self.repo.git('checkout', 'origin/main')
+        _, err = self.repo.capture_stdio(self.rp.main, [])
+        self.assertIn('Must specify new parent somehow', err)
+        _, err = self.repo.capture_stdio(self.rp.main, ['foobar'])
+        self.assertIn('Must be on the branch', err)
+
+        self.repo.git('checkout', 'branch_K')
+        _, err = self.repo.capture_stdio(self.rp.main, ['origin/main'])
+        self.assertIn('Cannot reparent a branch to its existing parent', err)
+        output, _ = self.repo.capture_stdio(self.rp.main, ['foobar'])
+        self.assertIn('Rebasing: branch_K', output)
+        self.assertIn('Rebasing: sub_K', output)
+        self.assertIn('Rebasing: branch_L', output)
+
+        self.assertSchema("""
     A B C D E F G M N O foobar1 foobar2 H I J K L
                                 foobar2 foobar3
                                               K sub_K
     A old_file
     """)
 
-    self.repo.git('checkout', 'sub_K')
-    output, _ = self.repo.capture_stdio(self.rp.main, ['foobar'])
-    self.assertIn('Squashing failed', output)
+        self.repo.git('checkout', 'sub_K')
+        output, _ = self.repo.capture_stdio(self.rp.main, ['foobar'])
+        self.assertIn('Squashing failed', output)
 
-    self.assertTrue(self.repo.run(self.gc.in_rebase))
+        self.assertTrue(self.repo.run(self.gc.in_rebase))
 
-    self.repo.git('rebase', '--abort')
-    self.assertIsNone(self.repo.run(self.gc.thaw))
+        self.repo.git('rebase', '--abort')
+        self.assertIsNone(self.repo.run(self.gc.thaw))
 
-    self.assertSchema("""
+        self.assertSchema("""
     A B C D E F G M N O foobar1 foobar2 H I J K L
                                 foobar2 foobar3
     A old_file
                                               K sub_K
     """)
 
-    self.assertEqual(self.repo.git('status', '--porcelain').stdout, '?? bob\n')
-
-    branches = self.repo.run(set, self.gc.branches())
-    self.assertEqual(branches, {'branch_K', 'main', 'sub_K', 'root_A',
-                                'branch_L', 'old_branch', 'foobar',
-                                'sub_foobar'})
-
-    self.repo.git('checkout', 'branch_K')
-    self.repo.run(self.mv.main, ['special_K'])
-
-    branches = self.repo.run(set, self.gc.branches())
-    self.assertEqual(branches, {'special_K', 'main', 'sub_K', 'root_A',
-                                'branch_L', 'old_branch', 'foobar',
-                                'sub_foobar'})
-
-    self.repo.git('checkout', 'origin/main')
-    _, err = self.repo.capture_stdio(self.mv.main, ['special_K', 'cool branch'])
-    self.assertIn('fatal: \'cool branch\' is not a valid branch name', err)
-
-    self.repo.run(self.mv.main, ['special_K', 'cool_branch'])
-    branches = self.repo.run(set, self.gc.branches())
-    # This check fails with git 2.4 (see crbug.com/487172)
-    self.assertEqual(branches, {'cool_branch', 'main', 'sub_K', 'root_A',
-                                'branch_L', 'old_branch', 'foobar',
-                                'sub_foobar'})
-
-    _, branch_tree = self.repo.run(self.gc.get_branch_tree)
-    self.assertEqual(branch_tree['sub_K'], 'foobar')
-
-
-  def testRebaseConflicts(self):
-    # Pretend that branch_L landed
-    self.origin.git('checkout', 'main')
-    with self.origin.open('L', 'w') as f:
-      f.write('L')
-    self.origin.git('add', 'L')
-    self.origin.git_commit('L')
-
-    # Add a commit to branch_K so that things fail
-    self.repo.git('checkout', 'branch_K')
-    with self.repo.open('M', 'w') as f:
-      f.write('NOPE')
-    self.repo.git('add', 'M')
-    self.repo.git_commit('K NOPE')
-
-    # Add a commits to branch_L which will work when squashed
-    self.repo.git('checkout', 'branch_L')
-    self.repo.git('reset', 'branch_L~')
-    with self.repo.open('L', 'w') as f:
-      f.write('NOPE')
-    self.repo.git('add', 'L')
-    self.repo.git_commit('L NOPE')
-    with self.repo.open('L', 'w') as f:
-      f.write('L')
-    self.repo.git('add', 'L')
-    self.repo.git_commit('L YUP')
-
-    # start on a branch which will be deleted
-    self.repo.git('checkout', 'branch_G')
-
-    output, _ = self.repo.capture_stdio(self.reup.main)
-    self.assertIn('branch.branch_K.dormant true', output)
-
-    output, _ = self.repo.capture_stdio(self.reup.main)
-    self.assertIn('Rebase in progress', output)
-
-    self.repo.git('checkout', '--theirs', 'M')
-    self.repo.git('rebase', '--skip')
-
-    output, _ = self.repo.capture_stdio(self.reup.main)
-    self.assertIn('Failed! Attempting to squash', output)
-    self.assertIn('Deleted branch branch_G', output)
-    self.assertIn('Deleted branch branch_L', output)
-    self.assertIn('\'branch_G\' was merged', output)
-    self.assertIn('checking out \'origin/main\'', output)
-
-  def testRebaseConflictsKeepGoing(self):
-    # Pretend that branch_L landed
-    self.origin.git('checkout', 'main')
-    with self.origin.open('L', 'w') as f:
-      f.write('L')
-    self.origin.git('add', 'L')
-    self.origin.git_commit('L')
-
-    # Add a commit to branch_K so that things fail
-    self.repo.git('checkout', 'branch_K')
-    with self.repo.open('M', 'w') as f:
-      f.write('NOPE')
-    self.repo.git('add', 'M')
-    self.repo.git_commit('K NOPE')
-
-    # Add a commits to branch_L which will work when squashed
-    self.repo.git('checkout', 'branch_L')
-    self.repo.git('reset', 'branch_L~')
-    with self.repo.open('L', 'w') as f:
-      f.write('NOPE')
-    self.repo.git('add', 'L')
-    self.repo.git_commit('L NOPE')
-    with self.repo.open('L', 'w') as f:
-      f.write('L')
-    self.repo.git('add', 'L')
-    self.repo.git_commit('L YUP')
-
-    # start on a branch which will be deleted
-    self.repo.git('checkout', 'branch_G')
-
-    self.repo.git('config', 'branch.branch_K.dormant', 'false')
-    output, _ = self.repo.capture_stdio(self.reup.main, ['-k'])
-    self.assertIn('--keep-going set, continuing with next branch.', output)
-    self.assertIn('could not be cleanly rebased:', output)
-    self.assertIn('  branch_K', output)
-
-
-  def testTrackTag(self):
-    self.origin.git('tag', 'lkgr', self.origin['M'])
-    self.repo.git('tag', 'lkgr', self.repo['D'])
-
-    self.repo.git('config', 'branch.branch_G.remote', '.')
-    self.repo.git('config', 'branch.branch_G.merge', 'refs/tags/lkgr')
-
-    self.assertIn(
-        'fatal: \'foo bar\' is not a valid branch name',
-        self.repo.capture_stdio(self.nb.main, ['--lkgr', 'foo bar'])[1])
-
-    self.repo.run(self.nb.main, ['--lkgr', 'foobar'])
-
-    with self.repo.open('foobar', 'w') as f:
-      f.write('this is the foobar file')
-    self.repo.git('add', 'foobar')
-    self.repo.git_commit('foobar1')
-
-    with self.repo.open('foobar', 'w') as f:
-      f.write('totes the Foobar file')
-    self.repo.git_commit('foobar2')
-
-    self.assertSchema("""
+        self.assertEqual(
+            self.repo.git('status', '--porcelain').stdout, '?? bob\n')
+
+        branches = self.repo.run(set, self.gc.branches())
+        self.assertEqual(
+            branches, {
+                'branch_K', 'main', 'sub_K', 'root_A', 'branch_L', 'old_branch',
+                'foobar', 'sub_foobar'
+            })
+
+        self.repo.git('checkout', 'branch_K')
+        self.repo.run(self.mv.main, ['special_K'])
+
+        branches = self.repo.run(set, self.gc.branches())
+        self.assertEqual(
+            branches, {
+                'special_K', 'main', 'sub_K', 'root_A', 'branch_L',
+                'old_branch', 'foobar', 'sub_foobar'
+            })
+
+        self.repo.git('checkout', 'origin/main')
+        _, err = self.repo.capture_stdio(self.mv.main,
+                                         ['special_K', 'cool branch'])
+        self.assertIn('fatal: \'cool branch\' is not a valid branch name', err)
+
+        self.repo.run(self.mv.main, ['special_K', 'cool_branch'])
+        branches = self.repo.run(set, self.gc.branches())
+        # This check fails with git 2.4 (see crbug.com/487172)
+        self.assertEqual(
+            branches, {
+                'cool_branch', 'main', 'sub_K', 'root_A', 'branch_L',
+                'old_branch', 'foobar', 'sub_foobar'
+            })
+
+        _, branch_tree = self.repo.run(self.gc.get_branch_tree)
+        self.assertEqual(branch_tree['sub_K'], 'foobar')
+
+    def testRebaseConflicts(self):
+        # Pretend that branch_L landed
+        self.origin.git('checkout', 'main')
+        with self.origin.open('L', 'w') as f:
+            f.write('L')
+        self.origin.git('add', 'L')
+        self.origin.git_commit('L')
+
+        # Add a commit to branch_K so that things fail
+        self.repo.git('checkout', 'branch_K')
+        with self.repo.open('M', 'w') as f:
+            f.write('NOPE')
+        self.repo.git('add', 'M')
+        self.repo.git_commit('K NOPE')
+
+        # Add a commits to branch_L which will work when squashed
+        self.repo.git('checkout', 'branch_L')
+        self.repo.git('reset', 'branch_L~')
+        with self.repo.open('L', 'w') as f:
+            f.write('NOPE')
+        self.repo.git('add', 'L')
+        self.repo.git_commit('L NOPE')
+        with self.repo.open('L', 'w') as f:
+            f.write('L')
+        self.repo.git('add', 'L')
+        self.repo.git_commit('L YUP')
+
+        # start on a branch which will be deleted
+        self.repo.git('checkout', 'branch_G')
+
+        output, _ = self.repo.capture_stdio(self.reup.main)
+        self.assertIn('branch.branch_K.dormant true', output)
+
+        output, _ = self.repo.capture_stdio(self.reup.main)
+        self.assertIn('Rebase in progress', output)
+
+        self.repo.git('checkout', '--theirs', 'M')
+        self.repo.git('rebase', '--skip')
+
+        output, _ = self.repo.capture_stdio(self.reup.main)
+        self.assertIn('Failed! Attempting to squash', output)
+        self.assertIn('Deleted branch branch_G', output)
+        self.assertIn('Deleted branch branch_L', output)
+        self.assertIn('\'branch_G\' was merged', output)
+        self.assertIn('checking out \'origin/main\'', output)
+
+    def testRebaseConflictsKeepGoing(self):
+        # Pretend that branch_L landed
+        self.origin.git('checkout', 'main')
+        with self.origin.open('L', 'w') as f:
+            f.write('L')
+        self.origin.git('add', 'L')
+        self.origin.git_commit('L')
+
+        # Add a commit to branch_K so that things fail
+        self.repo.git('checkout', 'branch_K')
+        with self.repo.open('M', 'w') as f:
+            f.write('NOPE')
+        self.repo.git('add', 'M')
+        self.repo.git_commit('K NOPE')
+
+        # Add a commits to branch_L which will work when squashed
+        self.repo.git('checkout', 'branch_L')
+        self.repo.git('reset', 'branch_L~')
+        with self.repo.open('L', 'w') as f:
+            f.write('NOPE')
+        self.repo.git('add', 'L')
+        self.repo.git_commit('L NOPE')
+        with self.repo.open('L', 'w') as f:
+            f.write('L')
+        self.repo.git('add', 'L')
+        self.repo.git_commit('L YUP')
+
+        # start on a branch which will be deleted
+        self.repo.git('checkout', 'branch_G')
+
+        self.repo.git('config', 'branch.branch_K.dormant', 'false')
+        output, _ = self.repo.capture_stdio(self.reup.main, ['-k'])
+        self.assertIn('--keep-going set, continuing with next branch.', output)
+        self.assertIn('could not be cleanly rebased:', output)
+        self.assertIn('  branch_K', output)
+
+    def testTrackTag(self):
+        self.origin.git('tag', 'lkgr', self.origin['M'])
+        self.repo.git('tag', 'lkgr', self.repo['D'])
+
+        self.repo.git('config', 'branch.branch_G.remote', '.')
+        self.repo.git('config', 'branch.branch_G.merge', 'refs/tags/lkgr')
+
+        self.assertIn(
+            'fatal: \'foo bar\' is not a valid branch name',
+            self.repo.capture_stdio(self.nb.main, ['--lkgr', 'foo bar'])[1])
+
+        self.repo.run(self.nb.main, ['--lkgr', 'foobar'])
+
+        with self.repo.open('foobar', 'w') as f:
+            f.write('this is the foobar file')
+        self.repo.git('add', 'foobar')
+        self.repo.git_commit('foobar1')
+
+        with self.repo.open('foobar', 'w') as f:
+            f.write('totes the Foobar file')
+        self.repo.git_commit('foobar2')
+
+        self.assertSchema("""
     A B H I J K
             J L
       B C D E F G
           D foobar1 foobar2
     """)
-    self.assertEqual(self.repo['A'], self.origin['A'])
-    self.assertEqual(self.repo['G'], self.origin['G'])
-
-    output, _ = self.repo.capture_stdio(self.reup.main)
-    self.assertIn('Rebasing: branch_G', output)
-    self.assertIn('Rebasing: branch_K', output)
-    self.assertIn('Rebasing: branch_L', output)
-    self.assertIn('Rebasing: foobar', output)
-    self.assertEqual(self.repo.git('rev-parse', 'lkgr').stdout.strip(),
-                      self.origin['M'])
-
-    self.assertSchema("""
+        self.assertEqual(self.repo['A'], self.origin['A'])
+        self.assertEqual(self.repo['G'], self.origin['G'])
+
+        output, _ = self.repo.capture_stdio(self.reup.main)
+        self.assertIn('Rebasing: branch_G', output)
+        self.assertIn('Rebasing: branch_K', output)
+        self.assertIn('Rebasing: branch_L', output)
+        self.assertIn('Rebasing: foobar', output)
+        self.assertEqual(
+            self.repo.git('rev-parse', 'lkgr').stdout.strip(), self.origin['M'])
+
+        self.assertSchema("""
     A B C D E F G M N O
                   M H I J K L
                   M foobar1 foobar2
     """)
 
-    _, err = self.repo.capture_stdio(self.rp.main, ['tag F'])
-    self.assertIn('fatal: invalid reference', err)
+        _, err = self.repo.capture_stdio(self.rp.main, ['tag F'])
+        self.assertIn('fatal: invalid reference', err)
 
-    output, _ = self.repo.capture_stdio(self.rp.main, ['tag_F'])
-    self.assertIn('to track tag_F [tag] (was lkgr [tag])', output)
+        output, _ = self.repo.capture_stdio(self.rp.main, ['tag_F'])
+        self.assertIn('to track tag_F [tag] (was lkgr [tag])', output)
 
-    self.assertSchema("""
+        self.assertSchema("""
     A B C D E F G M N O
                   M H I J K L
               F foobar1 foobar2
     """)
 
-    output, _ = self.repo.capture_stdio(self.rp.main, ['--lkgr'])
-    self.assertIn('to track lkgr [tag] (was tag_F [tag])', output)
+        output, _ = self.repo.capture_stdio(self.rp.main, ['--lkgr'])
+        self.assertIn('to track lkgr [tag] (was tag_F [tag])', output)
 
-    self.assertSchema("""
+        self.assertSchema("""
     A B C D E F G M N O
                   M H I J K L
                   M foobar1 foobar2
     """)
 
-    output, _ = self.repo.capture_stdio(self.rp.main, ['--root'])
-    self.assertIn('to track origin/main (was lkgr [tag])', output)
+        output, _ = self.repo.capture_stdio(self.rp.main, ['--root'])
+        self.assertIn('to track origin/main (was lkgr [tag])', output)
 
-    self.assertSchema("""
+        self.assertSchema("""
     A B C D E F G M N O foobar1 foobar2
                   M H I J K L
     """)
 
-  def testReparentBranchWithoutUpstream(self):
-    self.repo.git('branch', 'nerp')
-    self.repo.git('checkout', 'nerp')
+    def testReparentBranchWithoutUpstream(self):
+        self.repo.git('branch', 'nerp')
+        self.repo.git('checkout', 'nerp')
 
-    _, err = self.repo.capture_stdio(self.rp.main, ['branch_K'])
+        _, err = self.repo.capture_stdio(self.rp.main, ['branch_K'])
 
-    self.assertIn('Unable to determine nerp@{upstream}', err)
+        self.assertIn('Unable to determine nerp@{upstream}', err)
 
 
 if __name__ == '__main__':
-  sys.exit(coverage_utils.covered_main((
-    os.path.join(DEPOT_TOOLS_ROOT, 'git_rebase_update.py'),
-    os.path.join(DEPOT_TOOLS_ROOT, 'git_new_branch.py'),
-    os.path.join(DEPOT_TOOLS_ROOT, 'git_reparent_branch.py'),
-    os.path.join(DEPOT_TOOLS_ROOT, 'git_rename_branch.py')
-  )))
+    sys.exit(
+        coverage_utils.covered_main(
+            (os.path.join(DEPOT_TOOLS_ROOT, 'git_rebase_update.py'),
+             os.path.join(DEPOT_TOOLS_ROOT, 'git_new_branch.py'),
+             os.path.join(DEPOT_TOOLS_ROOT, 'git_reparent_branch.py'),
+             os.path.join(DEPOT_TOOLS_ROOT, 'git_rename_branch.py'))))

+ 113 - 110
tests/gsutil_test.py

@@ -2,12 +2,10 @@
 # Copyright 2014 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Test gsutil.py."""
 
 from __future__ import unicode_literals
 
-
 import base64
 import hashlib
 import io
@@ -30,121 +28,126 @@ import gsutil
 
 
 class TestError(Exception):
-  pass
+    pass
 
 
 class FakeCall(object):
-  def __init__(self):
-    self.expectations = []
+    def __init__(self):
+        self.expectations = []
 
-  def add_expectation(self, *args, **kwargs):
-    returns = kwargs.pop('_returns', None)
-    self.expectations.append((args, kwargs, returns))
+    def add_expectation(self, *args, **kwargs):
+        returns = kwargs.pop('_returns', None)
+        self.expectations.append((args, kwargs, returns))
 
-  def __call__(self, *args, **kwargs):
-    if not self.expectations:
-      raise TestError('Got unexpected\n%s\n%s' % (args, kwargs))
-    exp_args, exp_kwargs, exp_returns = self.expectations.pop(0)
-    if args != exp_args or kwargs != exp_kwargs:
-      message = 'Expected:\n  args: %s\n  kwargs: %s\n' % (exp_args, exp_kwargs)
-      message += 'Got:\n  args: %s\n  kwargs: %s\n' % (args, kwargs)
-      raise TestError(message)
-    return exp_returns
+    def __call__(self, *args, **kwargs):
+        if not self.expectations:
+            raise TestError('Got unexpected\n%s\n%s' % (args, kwargs))
+        exp_args, exp_kwargs, exp_returns = self.expectations.pop(0)
+        if args != exp_args or kwargs != exp_kwargs:
+            message = 'Expected:\n  args: %s\n  kwargs: %s\n' % (exp_args,
+                                                                 exp_kwargs)
+            message += 'Got:\n  args: %s\n  kwargs: %s\n' % (args, kwargs)
+            raise TestError(message)
+        return exp_returns
 
 
 class GsutilUnitTests(unittest.TestCase):
-  def setUp(self):
-    self.fake = FakeCall()
-    self.tempdir = tempfile.mkdtemp()
-    self.old_urlopen = getattr(urllib.request, 'urlopen')
-    self.old_call = getattr(subprocess, 'call')
-    setattr(urllib.request, 'urlopen', self.fake)
-    setattr(subprocess, 'call', self.fake)
-
-  def tearDown(self):
-    self.assertEqual(self.fake.expectations, [])
-    shutil.rmtree(self.tempdir)
-    setattr(urllib.request, 'urlopen', self.old_urlopen)
-    setattr(subprocess, 'call', self.old_call)
-
-  def test_download_gsutil(self):
-    version = gsutil.VERSION
-    filename = 'gsutil_%s.zip' % version
-    full_filename = os.path.join(self.tempdir, filename)
-    fake_file = b'This is gsutil.zip'
-    fake_file2 = b'This is other gsutil.zip'
-    url = '%s%s' % (gsutil.GSUTIL_URL, filename)
-    self.fake.add_expectation(url, _returns=io.BytesIO(fake_file))
-
-    self.assertEqual(
-        gsutil.download_gsutil(version, self.tempdir), full_filename)
-    with open(full_filename, 'rb') as f:
-      self.assertEqual(fake_file, f.read())
-
-    metadata_url = gsutil.API_URL + filename
-    md5_calc = hashlib.md5()
-    md5_calc.update(fake_file)
-    b64_md5 = base64.b64encode(md5_calc.hexdigest().encode('utf-8'))
-    self.fake.add_expectation(
-        metadata_url,
-        _returns=io.BytesIO(
-            json.dumps({'md5Hash': b64_md5.decode('utf-8')}).encode('utf-8')))
-    self.assertEqual(
-        gsutil.download_gsutil(version, self.tempdir), full_filename)
-    with open(full_filename, 'rb') as f:
-      self.assertEqual(fake_file, f.read())
-    self.assertEqual(self.fake.expectations, [])
-
-    self.fake.add_expectation(
-        metadata_url,
-        _returns=io.BytesIO(
-            json.dumps({
-              'md5Hash': base64.b64encode(b'aaaaaaa').decode('utf-8')  # Bad MD5
-            }).encode('utf-8')))
-    self.fake.add_expectation(url, _returns=io.BytesIO(fake_file2))
-    self.assertEqual(
-        gsutil.download_gsutil(version, self.tempdir), full_filename)
-    with open(full_filename, 'rb') as f:
-      self.assertEqual(fake_file2, f.read())
-    self.assertEqual(self.fake.expectations, [])
-
-  def test_ensure_gsutil_full(self):
-    version = gsutil.VERSION
-    gsutil_dir = os.path.join(self.tempdir, 'gsutil_%s' % version, 'gsutil')
-    gsutil_bin = os.path.join(gsutil_dir, 'gsutil')
-    gsutil_flag = os.path.join(gsutil_dir, 'install.flag')
-    os.makedirs(gsutil_dir)
-
-    zip_filename = 'gsutil_%s.zip' % version
-    url = '%s%s' % (gsutil.GSUTIL_URL, zip_filename)
-    _, tempzip = tempfile.mkstemp()
-    fake_gsutil = 'Fake gsutil'
-    with zipfile.ZipFile(tempzip, 'w') as zf:
-      zf.writestr('gsutil/gsutil', fake_gsutil)
-    with open(tempzip, 'rb') as f:
-      self.fake.add_expectation(url, _returns=io.BytesIO(f.read()))
-
-    # This should write the gsutil_bin with 'Fake gsutil'
-    gsutil.ensure_gsutil(version, self.tempdir, False)
-    self.assertTrue(os.path.exists(gsutil_bin))
-    with open(gsutil_bin, 'r') as f:
-      self.assertEqual(f.read(), fake_gsutil)
-    self.assertTrue(os.path.exists(gsutil_flag))
-    self.assertEqual(self.fake.expectations, [])
-
-  def test_ensure_gsutil_short(self):
-    version = gsutil.VERSION
-    gsutil_dir = os.path.join(self.tempdir, 'gsutil_%s' % version, 'gsutil')
-    gsutil_bin = os.path.join(gsutil_dir, 'gsutil')
-    gsutil_flag = os.path.join(gsutil_dir, 'install.flag')
-    os.makedirs(gsutil_dir)
-
-    with open(gsutil_bin, 'w') as f:
-      f.write('Foobar')
-    with open(gsutil_flag, 'w') as f:
-      f.write('Barbaz')
-    self.assertEqual(
-        gsutil.ensure_gsutil(version, self.tempdir, False), gsutil_bin)
+    def setUp(self):
+        self.fake = FakeCall()
+        self.tempdir = tempfile.mkdtemp()
+        self.old_urlopen = getattr(urllib.request, 'urlopen')
+        self.old_call = getattr(subprocess, 'call')
+        setattr(urllib.request, 'urlopen', self.fake)
+        setattr(subprocess, 'call', self.fake)
+
+    def tearDown(self):
+        self.assertEqual(self.fake.expectations, [])
+        shutil.rmtree(self.tempdir)
+        setattr(urllib.request, 'urlopen', self.old_urlopen)
+        setattr(subprocess, 'call', self.old_call)
+
+    def test_download_gsutil(self):
+        version = gsutil.VERSION
+        filename = 'gsutil_%s.zip' % version
+        full_filename = os.path.join(self.tempdir, filename)
+        fake_file = b'This is gsutil.zip'
+        fake_file2 = b'This is other gsutil.zip'
+        url = '%s%s' % (gsutil.GSUTIL_URL, filename)
+        self.fake.add_expectation(url, _returns=io.BytesIO(fake_file))
+
+        self.assertEqual(gsutil.download_gsutil(version, self.tempdir),
+                         full_filename)
+        with open(full_filename, 'rb') as f:
+            self.assertEqual(fake_file, f.read())
+
+        metadata_url = gsutil.API_URL + filename
+        md5_calc = hashlib.md5()
+        md5_calc.update(fake_file)
+        b64_md5 = base64.b64encode(md5_calc.hexdigest().encode('utf-8'))
+        self.fake.add_expectation(metadata_url,
+                                  _returns=io.BytesIO(
+                                      json.dumps({
+                                          'md5Hash':
+                                          b64_md5.decode('utf-8')
+                                      }).encode('utf-8')))
+        self.assertEqual(gsutil.download_gsutil(version, self.tempdir),
+                         full_filename)
+        with open(full_filename, 'rb') as f:
+            self.assertEqual(fake_file, f.read())
+        self.assertEqual(self.fake.expectations, [])
+
+        self.fake.add_expectation(
+            metadata_url,
+            _returns=io.BytesIO(
+                json.dumps({
+                    'md5Hash':
+                    base64.b64encode(b'aaaaaaa').decode('utf-8')  # Bad MD5
+                }).encode('utf-8')))
+        self.fake.add_expectation(url, _returns=io.BytesIO(fake_file2))
+        self.assertEqual(gsutil.download_gsutil(version, self.tempdir),
+                         full_filename)
+        with open(full_filename, 'rb') as f:
+            self.assertEqual(fake_file2, f.read())
+        self.assertEqual(self.fake.expectations, [])
+
+    def test_ensure_gsutil_full(self):
+        version = gsutil.VERSION
+        gsutil_dir = os.path.join(self.tempdir, 'gsutil_%s' % version, 'gsutil')
+        gsutil_bin = os.path.join(gsutil_dir, 'gsutil')
+        gsutil_flag = os.path.join(gsutil_dir, 'install.flag')
+        os.makedirs(gsutil_dir)
+
+        zip_filename = 'gsutil_%s.zip' % version
+        url = '%s%s' % (gsutil.GSUTIL_URL, zip_filename)
+        _, tempzip = tempfile.mkstemp()
+        fake_gsutil = 'Fake gsutil'
+        with zipfile.ZipFile(tempzip, 'w') as zf:
+            zf.writestr('gsutil/gsutil', fake_gsutil)
+        with open(tempzip, 'rb') as f:
+            self.fake.add_expectation(url, _returns=io.BytesIO(f.read()))
+
+        # This should write the gsutil_bin with 'Fake gsutil'
+        gsutil.ensure_gsutil(version, self.tempdir, False)
+        self.assertTrue(os.path.exists(gsutil_bin))
+        with open(gsutil_bin, 'r') as f:
+            self.assertEqual(f.read(), fake_gsutil)
+        self.assertTrue(os.path.exists(gsutil_flag))
+        self.assertEqual(self.fake.expectations, [])
+
+    def test_ensure_gsutil_short(self):
+        version = gsutil.VERSION
+        gsutil_dir = os.path.join(self.tempdir, 'gsutil_%s' % version, 'gsutil')
+        gsutil_bin = os.path.join(gsutil_dir, 'gsutil')
+        gsutil_flag = os.path.join(gsutil_dir, 'install.flag')
+        os.makedirs(gsutil_dir)
+
+        with open(gsutil_bin, 'w') as f:
+            f.write('Foobar')
+        with open(gsutil_flag, 'w') as f:
+            f.write('Barbaz')
+        self.assertEqual(gsutil.ensure_gsutil(version, self.tempdir, False),
+                         gsutil_bin)
+
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 80 - 76
tests/lockfile_test.py

@@ -21,90 +21,94 @@ from testing_support import coverage_utils
 
 import lockfile
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class LockTest(unittest.TestCase):
-  def setUp(self):
-    self.cache_dir = tempfile.mkdtemp(prefix='lockfile')
-    self.addCleanup(shutil.rmtree, self.cache_dir, ignore_errors=True)
-
-  def testLock(self):
-    with lockfile.lock(self.cache_dir):
-      # cached dir locked, attempt to lock it again
-      with self.assertRaises(lockfile.LockError):
+    def setUp(self):
+        self.cache_dir = tempfile.mkdtemp(prefix='lockfile')
+        self.addCleanup(shutil.rmtree, self.cache_dir, ignore_errors=True)
+
+    def testLock(self):
         with lockfile.lock(self.cache_dir):
-          pass
+            # cached dir locked, attempt to lock it again
+            with self.assertRaises(lockfile.LockError):
+                with lockfile.lock(self.cache_dir):
+                    pass
 
-    with lockfile.lock(self.cache_dir):
-      pass
+        with lockfile.lock(self.cache_dir):
+            pass
 
-  @mock.patch('time.sleep')
-  def testLockConcurrent(self, sleep_mock):
-    '''testLockConcurrent simulates what happens when two separate processes try
+    @mock.patch('time.sleep')
+    def testLockConcurrent(self, sleep_mock):
+        '''testLockConcurrent simulates what happens when two separate processes try
     to acquire the same file lock with timeout.'''
-    # Queues q_f1 and q_sleep are used to controll execution of individual
-    # threads.
-    q_f1 = queue.Queue()
-    q_sleep = queue.Queue()
-    results = queue.Queue()
-
-    def side_effect(arg):
-      '''side_effect is called when with l.lock is blocked. In this unit test
+        # Queues q_f1 and q_sleep are used to control execution of individual
+        # threads.
+        q_f1 = queue.Queue()
+        q_sleep = queue.Queue()
+        results = queue.Queue()
+
+        def side_effect(arg):
+            '''side_effect is called when the `with l.lock` statement blocks. In this unit test
       case, it comes from f2.'''
-      logging.debug('sleep: started')
-      q_sleep.put(True)
-      logging.debug('sleep: waiting for q_sleep to be consumed')
-      q_sleep.join()
-      logging.debug('sleep: waiting for result before exiting')
-      results.get(timeout=1)
-      logging.debug('sleep: exiting')
-
-    sleep_mock.side_effect = side_effect
-
-    def f1():
-      '''f1 enters first in l.lock (controlled via q_f1). It then waits for
+            logging.debug('sleep: started')
+            q_sleep.put(True)
+            logging.debug('sleep: waiting for q_sleep to be consumed')
+            q_sleep.join()
+            logging.debug('sleep: waiting for result before exiting')
+            results.get(timeout=1)
+            logging.debug('sleep: exiting')
+
+        sleep_mock.side_effect = side_effect
+
+        def f1():
+            '''f1 enters first in l.lock (controlled via q_f1). It then waits for
       side_effect to put a message in queue q_sleep.'''
-      logging.debug('f1 started, locking')
-
-      with lockfile.lock(self.cache_dir, timeout=1):
-        logging.debug('f1: locked')
-        q_f1.put(True)
-        logging.debug('f1: waiting on q_f1 to be consumed')
-        q_f1.join()
-        logging.debug('f1: done waiting on q_f1, getting q_sleep')
-        q_sleep.get(timeout=1)
-        results.put(True)
-
-      logging.debug('f1: lock released')
-      q_sleep.task_done()
-      logging.debug('f1: exiting')
-
-    def f2():
-      '''f2 enters second in l.lock (controlled by q_f1).'''
-      logging.debug('f2: started, consuming q_f1')
-      q_f1.get(timeout=1)  # wait for f1 to execute lock
-      q_f1.task_done()
-      logging.debug('f2: done waiting for q_f1, locking')
-
-      with lockfile.lock(self.cache_dir, timeout=1):
-        logging.debug('f2: locked')
-        results.put(True)
-
-    t1 = threading.Thread(target=f1)
-    t1.start()
-    t2 = threading.Thread(target=f2)
-    t2.start()
-    t1.join()
-    t2.join()
-
-    # One result was consumed by side_effect, we expect only one in the queue.
-    self.assertEqual(1, results.qsize())
-    sleep_mock.assert_called_once_with(1)
+            logging.debug('f1 started, locking')
+
+            with lockfile.lock(self.cache_dir, timeout=1):
+                logging.debug('f1: locked')
+                q_f1.put(True)
+                logging.debug('f1: waiting on q_f1 to be consumed')
+                q_f1.join()
+                logging.debug('f1: done waiting on q_f1, getting q_sleep')
+                q_sleep.get(timeout=1)
+                results.put(True)
+
+            logging.debug('f1: lock released')
+            q_sleep.task_done()
+            logging.debug('f1: exiting')
+
+        def f2():
+            '''f2 enters second in l.lock (controlled by q_f1).'''
+            logging.debug('f2: started, consuming q_f1')
+            q_f1.get(timeout=1)  # wait for f1 to execute lock
+            q_f1.task_done()
+            logging.debug('f2: done waiting for q_f1, locking')
+
+            with lockfile.lock(self.cache_dir, timeout=1):
+                logging.debug('f2: locked')
+                results.put(True)
+
+        t1 = threading.Thread(target=f1)
+        t1.start()
+        t2 = threading.Thread(target=f2)
+        t2.start()
+        t1.join()
+        t2.join()
+
+        # One result was consumed by side_effect, so we expect only one in the
+        # queue.
+        self.assertEqual(1, results.qsize())
+        sleep_mock.assert_called_once_with(1)
 
 
 if __name__ == '__main__':
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  sys.exit(
-      coverage_utils.covered_main(
-          (os.path.join(DEPOT_TOOLS_ROOT, 'git_cache.py')),
-          required_percentage=0))
+    logging.basicConfig(
+        level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
+    sys.exit(
+        coverage_utils.covered_main(
+            (os.path.join(DEPOT_TOOLS_ROOT, 'git_cache.py')),
+            required_percentage=0))

+ 791 - 752
tests/metrics_test.py

@@ -15,769 +15,808 @@ sys.path.insert(0, ROOT_DIR)
 import metrics
 import metrics_utils
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class TimeMock(object):
-  def __init__(self):
-    self._count = 0
+    def __init__(self):
+        self._count = 0
 
-  def __call__(self):
-    self._count += 1
-    return self._count * 1000
+    def __call__(self):
+        self._count += 1
+        return self._count * 1000
 
 
 class MetricsCollectorTest(unittest.TestCase):
-  def setUp(self):
-    self.config_file = os.path.join(ROOT_DIR, 'metrics.cfg')
-    self.collector = metrics.MetricsCollector()
-
-    # Keep track of the URL requests, file reads/writes and subprocess spawned.
-    self.urllib_request = mock.Mock()
-    self.print_notice = mock.Mock()
-    self.print_version_change = mock.Mock()
-    self.Popen = mock.Mock()
-    self.FileWrite = mock.Mock()
-    self.FileRead = mock.Mock()
-
-    # So that we don't have to update the tests every time we change the
-    # version.
-    mock.patch('metrics.metrics_utils.CURRENT_VERSION', 0).start()
-    mock.patch('metrics.urllib.request', self.urllib_request).start()
-    mock.patch('metrics.subprocess2.Popen', self.Popen).start()
-    mock.patch('metrics.gclient_utils.FileWrite', self.FileWrite).start()
-    mock.patch('metrics.gclient_utils.FileRead', self.FileRead).start()
-    mock.patch('metrics.metrics_utils.print_notice', self.print_notice).start()
-    mock.patch(
-        'metrics.metrics_utils.print_version_change',
-        self.print_version_change).start()
-
-    # Patch the methods used to get the system information, so we have a known
-    # environment.
-    mock.patch('metrics.time.time',
-               TimeMock()).start()
-    mock.patch('metrics.metrics_utils.get_python_version',
-               lambda: '2.7.13').start()
-    mock.patch(
-        'metrics.gclient_utils.GetOperatingSystem', lambda: 'linux').start()
-    mock.patch('metrics.detect_host_arch.HostArch',
-               lambda: 'x86').start()
-    mock.patch('metrics_utils.get_repo_timestamp',
-               lambda _: 1234).start()
-    mock.patch('metrics_utils.get_git_version',
-               lambda: '2.18.1').start()
-
-    self.maxDiff = None
-    self.default_metrics = {
-        "metrics_version": 0,
-        "python_version": "2.7.13",
-        "git_version": "2.18.1",
-        "execution_time": 1000,
-        "timestamp": 3000,
-        "exit_code": 0,
-        "command": "fun",
-        "depot_tools_age": 1234,
-        "host_arch": "x86",
-        "host_os": "linux",
-    }
-
-    self.addCleanup(mock.patch.stopall)
-
-  def assert_writes_file(self, expected_filename, expected_content):
-    self.assertEqual(len(self.FileWrite.mock_calls), 1)
-    filename, content = self.FileWrite.mock_calls[0][1]
-
-    self.assertEqual(filename, expected_filename)
-    self.assertEqual(json.loads(content), expected_content)
-
-  def test_writes_config_if_not_exists(self):
-    self.FileRead.side_effect = [IOError(2, "No such file or directory")]
-    mock_response = mock.Mock()
-    self.urllib_request.urlopen.side_effect = [mock_response]
-    mock_response.getcode.side_effect = [200]
-
-    self.assertTrue(self.collector.config.is_googler)
-    self.assertIsNone(self.collector.config.opted_in)
-    self.assertEqual(self.collector.config.countdown, 10)
-
-    self.assert_writes_file(
-        self.config_file,
-        {'is-googler': True, 'countdown': 10, 'opt-in': None, 'version': 0})
-
-  def test_writes_config_if_not_exists_non_googler(self):
-    self.FileRead.side_effect = [IOError(2, "No such file or directory")]
-    mock_response = mock.Mock()
-    self.urllib_request.urlopen.side_effect = [mock_response]
-    mock_response.getcode.side_effect = [403]
-
-    self.assertFalse(self.collector.config.is_googler)
-    self.assertIsNone(self.collector.config.opted_in)
-    self.assertEqual(self.collector.config.countdown, 10)
-
-    self.assert_writes_file(
-        self.config_file,
-        {'is-googler': False, 'countdown': 10, 'opt-in': None, 'version': 0})
-
-  def test_disables_metrics_if_cant_write_config(self):
-    self.FileRead.side_effect = [IOError(2, 'No such file or directory')]
-    mock_response = mock.Mock()
-    self.urllib_request.urlopen.side_effect = [mock_response]
-    mock_response.getcode.side_effect = [200]
-    self.FileWrite.side_effect = [IOError(13, 'Permission denied.')]
-
-    self.assertTrue(self.collector.config.is_googler)
-    self.assertFalse(self.collector.config.opted_in)
-    self.assertEqual(self.collector.config.countdown, 10)
-
-  def assert_collects_metrics(self, update_metrics=None):
-    expected_metrics = self.default_metrics
-    self.default_metrics.update(update_metrics or {})
-    # Assert we invoked the script to upload them.
-    self.Popen.assert_called_with(
-        ['vpython3', metrics.UPLOAD_SCRIPT], stdin=metrics.subprocess2.PIPE)
-    # Assert we collected the right metrics.
-    write_call = self.Popen.return_value.stdin.write.call_args
-    collected_metrics = json.loads(write_call[0][0])
-    self.assertTrue(self.collector.collecting_metrics)
-    self.assertEqual(collected_metrics, expected_metrics)
-
-  def test_collects_system_information(self):
-    """Tests that we collect information about the runtime environment."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
-    ]
-    @self.collector.collect_metrics('fun')
-    def fun():
-      pass
-
-    fun()
-    self.assert_collects_metrics()
-
-  def test_collects_added_metrics(self):
-    """Tests that we can collect custom metrics."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
-    ]
-    @self.collector.collect_metrics('fun')
-    def fun():
-      self.collector.add('foo', 'bar')
-
-    fun()
-    self.assert_collects_metrics({'foo': 'bar'})
-
-  def test_collects_metrics_when_opted_in(self):
-    """Tests that metrics are collected when the user opts-in."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 1234, "opt-in": true, "version": 0}'
-    ]
-    @self.collector.collect_metrics('fun')
-    def fun():
-      pass
-
-    fun()
-    self.assert_collects_metrics()
-
-  @mock.patch('metrics_utils.REPORT_BUILD', 'p/b/b/1')
-  def test_collects_metrics_report_build_set(self):
-    """Tests that metrics are collected when REPORT_BUILD is set."""
-    @self.collector.collect_metrics('fun')
-    def fun():
-      pass
-
-    fun()
-    self.assert_collects_metrics({
-        'bot_metrics': {
-            'build_id': 1,
-            'builder': {
-                'project': 'p',
-                'bucket': 'b',
-                'builder': 'b',
-            }
+    def setUp(self):
+        self.config_file = os.path.join(ROOT_DIR, 'metrics.cfg')
+        self.collector = metrics.MetricsCollector()
+
+        # Keep track of the URL requests, file reads/writes and subprocess
+        # spawned.
+        self.urllib_request = mock.Mock()
+        self.print_notice = mock.Mock()
+        self.print_version_change = mock.Mock()
+        self.Popen = mock.Mock()
+        self.FileWrite = mock.Mock()
+        self.FileRead = mock.Mock()
+
+        # So that we don't have to update the tests every time we change the
+        # version.
+        mock.patch('metrics.metrics_utils.CURRENT_VERSION', 0).start()
+        mock.patch('metrics.urllib.request', self.urllib_request).start()
+        mock.patch('metrics.subprocess2.Popen', self.Popen).start()
+        mock.patch('metrics.gclient_utils.FileWrite', self.FileWrite).start()
+        mock.patch('metrics.gclient_utils.FileRead', self.FileRead).start()
+        mock.patch('metrics.metrics_utils.print_notice',
+                   self.print_notice).start()
+        mock.patch('metrics.metrics_utils.print_version_change',
+                   self.print_version_change).start()
+
+        # Patch the methods used to get the system information, so we have a
+        # known environment.
+        mock.patch('metrics.time.time', TimeMock()).start()
+        mock.patch('metrics.metrics_utils.get_python_version',
+                   lambda: '2.7.13').start()
+        mock.patch('metrics.gclient_utils.GetOperatingSystem',
+                   lambda: 'linux').start()
+        mock.patch('metrics.detect_host_arch.HostArch', lambda: 'x86').start()
+        mock.patch('metrics_utils.get_repo_timestamp', lambda _: 1234).start()
+        mock.patch('metrics_utils.get_git_version', lambda: '2.18.1').start()
+
+        self.maxDiff = None
+        self.default_metrics = {
+            "metrics_version": 0,
+            "python_version": "2.7.13",
+            "git_version": "2.18.1",
+            "execution_time": 1000,
+            "timestamp": 3000,
+            "exit_code": 0,
+            "command": "fun",
+            "depot_tools_age": 1234,
+            "host_arch": "x86",
+            "host_os": "linux",
         }
-    })
-    # We shouldn't have tried to read the config file.
-    self.assertFalse(self.FileRead.called)
-
-  @mock.patch('metrics_utils.COLLECT_METRICS', False)
-  def test_metrics_collection_disabled(self):
-    """Tests that metrics collection can be disabled via a global variable."""
-    @self.collector.collect_metrics('fun')
-    def fun():
-      pass
-
-    fun()
-
-    self.assertFalse(self.collector.collecting_metrics)
-    # We shouldn't have tried to read the config file.
-    self.assertFalse(self.FileRead.called)
-    # Nor tried to upload any metrics.
-    self.assertFalse(self.Popen.called)
-
-  def test_metrics_collection_disabled_not_googler(self):
-    """Tests that metrics collection is disabled for non googlers."""
-    self.FileRead.side_effect = [
-        '{"is-googler": false, "countdown": 0, "opt-in": null, "version": 0}'
-    ]
-    @self.collector.collect_metrics('fun')
-    def fun():
-      pass
-
-    fun()
-
-    self.assertFalse(self.collector.collecting_metrics)
-    self.assertFalse(self.collector.config.is_googler)
-    self.assertIsNone(self.collector.config.opted_in)
-    self.assertEqual(self.collector.config.countdown, 0)
-    # Assert that we did not try to upload any metrics.
-    self.assertFalse(self.Popen.called)
-
-  def test_metrics_collection_disabled_opted_out(self):
-    """Tests that metrics collection is disabled if the user opts out."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": false, "version": 0}'
-    ]
-    @self.collector.collect_metrics('fun')
-    def fun():
-      pass
-
-    fun()
-
-    self.assertFalse(self.collector.collecting_metrics)
-    self.assertTrue(self.collector.config.is_googler)
-    self.assertFalse(self.collector.config.opted_in)
-    self.assertEqual(self.collector.config.countdown, 0)
-    # Assert that we did not try to upload any metrics.
-    self.assertFalse(self.Popen.called)
-
-  def test_metrics_collection_disabled_non_zero_countdown(self):
-    """Tests that metrics collection is disabled until the countdown expires."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 1, "opt-in": null, "version": 0}'
-    ]
-    @self.collector.collect_metrics('fun')
-    def fun():
-      pass
-
-    fun()
-
-    self.assertFalse(self.collector.collecting_metrics)
-    self.assertTrue(self.collector.config.is_googler)
-    self.assertFalse(self.collector.config.opted_in)
-    self.assertEqual(self.collector.config.countdown, 1)
-    # Assert that we did not try to upload any metrics.
-    self.assertFalse(self.Popen.called)
-
-  def test_handles_exceptions(self):
-    """Tests that exception are caught and we exit with an appropriate code."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
-    ]
-    @self.collector.collect_metrics('fun')
-    def fun():
-      raise ValueError
-
-    # When an exception is raised, we should catch it, update exit-code,
-    # collect metrics, and re-raise it.
-    with self.assertRaises(ValueError):
-      fun()
-    self.assert_collects_metrics({'exit_code': 1})
-
-  def test_handles_system_exit(self):
-    """Tests that the sys.exit code is respected and metrics are collected."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
-    ]
-    @self.collector.collect_metrics('fun')
-    def fun():
-      sys.exit(0)
-
-    # When an exception is raised, we should catch it, update exit-code,
-    # collect metrics, and re-raise it.
-    with self.assertRaises(SystemExit) as cm:
-      fun()
-    self.assertEqual(cm.exception.code, 0)
-    self.assert_collects_metrics({'exit_code': 0})
-
-  def test_handles_keyboard_interrupt(self):
-    """Tests that KeyboardInterrupt exits with 130 and metrics are collected."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
-    ]
-
-    @self.collector.collect_metrics('fun')
-    def fun():
-      raise KeyboardInterrupt
-
-    # When an exception is raised, we should catch it, update exit-code,
-    # collect metrics, and re-raise it.
-    with self.assertRaises(KeyboardInterrupt):
-      fun()
-    self.assert_collects_metrics({'exit_code': 130})
-
-  def test_handles_system_exit_non_zero(self):
-    """Tests that the sys.exit code is respected and metrics are collected."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
-    ]
-    @self.collector.collect_metrics('fun')
-    def fun():
-      sys.exit(123)
-
-    # When an exception is raised, we should catch it, update exit-code,
-    # collect metrics, and re-raise it.
-    with self.assertRaises(SystemExit) as cm:
-      fun()
-    self.assertEqual(cm.exception.code, 123)
-    self.assert_collects_metrics({'exit_code': 123})
-
-  def test_prints_notice_non_zero_countdown(self):
-    """Tests that a notice is printed while the countdown is non-zero."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 1234, "opt-in": null, "version": 0}'
-    ]
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        pass
-    self.assertEqual(cm.exception.code, 0)
-    self.print_notice.assert_called_once_with(1234)
-
-  def test_prints_notice_zero_countdown(self):
-    """Tests that a notice is printed when the countdown reaches 0."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
-    ]
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        pass
-    self.assertEqual(cm.exception.code, 0)
-    self.print_notice.assert_called_once_with(0)
-
-  def test_doesnt_print_notice_opted_in(self):
-    """Tests that a notice is not printed when the user opts-in."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
-    ]
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        pass
-    self.assertEqual(cm.exception.code, 0)
-    self.assertFalse(self.print_notice.called)
-
-  def test_doesnt_print_notice_opted_out(self):
-    """Tests that a notice is not printed when the user opts-out."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": false, "version": 0}'
-    ]
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        pass
-    self.assertEqual(cm.exception.code, 0)
-    self.assertFalse(self.print_notice.called)
-
-  @mock.patch('metrics_utils.COLLECT_METRICS', False)
-  def test_doesnt_print_notice_disable_metrics_collection(self):
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        pass
-    self.assertEqual(cm.exception.code, 0)
-    self.assertFalse(self.print_notice.called)
-    # We shouldn't have tried to read the config file.
-    self.assertFalse(self.FileRead.called)
-
-  @mock.patch('metrics_utils.REPORT_BUILD', 'p/b/b/1')
-  def test_doesnt_print_notice_report_build(self):
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        pass
-    self.assertEqual(cm.exception.code, 0)
-    self.assertFalse(self.print_notice.called)
-    # We shouldn't have tried to read the config file.
-    self.assertFalse(self.FileRead.called)
-
-  def test_print_notice_handles_exceptions(self):
-    """Tests that exception are caught and we exit with an appropriate code."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
-    ]
-    # print_notice should catch the exception, print it and invoke sys.exit()
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        raise ValueError
-    self.assertEqual(cm.exception.code, 1)
-    self.assertTrue(self.print_notice.called)
-
-  def test_print_notice_handles_system_exit(self):
-    """Tests that the sys.exit code is respected and a notice is displayed."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
-    ]
-    # print_notice should catch the exception, print it and invoke sys.exit()
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        sys.exit(0)
-    self.assertEqual(cm.exception.code, 0)
-    self.assertTrue(self.print_notice.called)
-
-  def test_print_notice_handles_system_exit_non_zero(self):
-    """Tests that the sys.exit code is respected and a notice is displayed."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
-    ]
-    # When an exception is raised, we should catch it, update exit-code,
-    # collect metrics, and re-raise it.
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        sys.exit(123)
-    self.assertEqual(cm.exception.code, 123)
-    self.assertTrue(self.print_notice.called)
-
-  def test_counts_down(self):
-    """Tests that the countdown works correctly."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 10, "opt-in": null, "version": 0}'
-    ]
-
-    # We define multiple functions to ensure it has no impact on countdown.
-    @self.collector.collect_metrics('barn')
-    def _barn():
-      pass
-    @self.collector.collect_metrics('fun')
-    def _fun():
-      pass
-    def foo_main():
-      pass
-
-    # Assert that the countdown hasn't decrease yet.
-    self.assertFalse(self.FileWrite.called)
-    self.assertEqual(self.collector.config.countdown, 10)
-
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        foo_main()
-    self.assertEqual(cm.exception.code, 0)
-
-    # Assert that the countdown decreased by one, and the config file was
-    # updated.
-    self.assertEqual(self.collector.config.countdown, 9)
-    self.print_notice.assert_called_once_with(10)
-
-    self.assert_writes_file(
-        self.config_file,
-        {'is-googler': True, 'countdown': 9, 'opt-in': None, 'version': 0})
-
-  def test_nested_functions(self):
-    """Tests that a function can call another function for which metrics are
+
+        self.addCleanup(mock.patch.stopall)
+
+    def assert_writes_file(self, expected_filename, expected_content):
+        self.assertEqual(len(self.FileWrite.mock_calls), 1)
+        filename, content = self.FileWrite.mock_calls[0][1]
+
+        self.assertEqual(filename, expected_filename)
+        self.assertEqual(json.loads(content), expected_content)
+
+    def test_writes_config_if_not_exists(self):
+        self.FileRead.side_effect = [IOError(2, "No such file or directory")]
+        mock_response = mock.Mock()
+        self.urllib_request.urlopen.side_effect = [mock_response]
+        mock_response.getcode.side_effect = [200]
+
+        self.assertTrue(self.collector.config.is_googler)
+        self.assertIsNone(self.collector.config.opted_in)
+        self.assertEqual(self.collector.config.countdown, 10)
+
+        self.assert_writes_file(self.config_file, {
+            'is-googler': True,
+            'countdown': 10,
+            'opt-in': None,
+            'version': 0
+        })
+
+    def test_writes_config_if_not_exists_non_googler(self):
+        self.FileRead.side_effect = [IOError(2, "No such file or directory")]
+        mock_response = mock.Mock()
+        self.urllib_request.urlopen.side_effect = [mock_response]
+        mock_response.getcode.side_effect = [403]
+
+        self.assertFalse(self.collector.config.is_googler)
+        self.assertIsNone(self.collector.config.opted_in)
+        self.assertEqual(self.collector.config.countdown, 10)
+
+        self.assert_writes_file(self.config_file, {
+            'is-googler': False,
+            'countdown': 10,
+            'opt-in': None,
+            'version': 0
+        })
+
+    def test_disables_metrics_if_cant_write_config(self):
+        self.FileRead.side_effect = [IOError(2, 'No such file or directory')]
+        mock_response = mock.Mock()
+        self.urllib_request.urlopen.side_effect = [mock_response]
+        mock_response.getcode.side_effect = [200]
+        self.FileWrite.side_effect = [IOError(13, 'Permission denied.')]
+
+        self.assertTrue(self.collector.config.is_googler)
+        self.assertFalse(self.collector.config.opted_in)
+        self.assertEqual(self.collector.config.countdown, 10)
+
+    def assert_collects_metrics(self, update_metrics=None):
+        expected_metrics = self.default_metrics
+        self.default_metrics.update(update_metrics or {})
+        # Assert we invoked the script to upload them.
+        self.Popen.assert_called_with(['vpython3', metrics.UPLOAD_SCRIPT],
+                                      stdin=metrics.subprocess2.PIPE)
+        # Assert we collected the right metrics.
+        write_call = self.Popen.return_value.stdin.write.call_args
+        collected_metrics = json.loads(write_call[0][0])
+        self.assertTrue(self.collector.collecting_metrics)
+        self.assertEqual(collected_metrics, expected_metrics)
+
+    def test_collects_system_information(self):
+        """Tests that we collect information about the runtime environment."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            pass
+
+        fun()
+        self.assert_collects_metrics()
+
+    def test_collects_added_metrics(self):
+        """Tests that we can collect custom metrics."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            self.collector.add('foo', 'bar')
+
+        fun()
+        self.assert_collects_metrics({'foo': 'bar'})
+
+    def test_collects_metrics_when_opted_in(self):
+        """Tests that metrics are collected when the user opts-in."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 1234, "opt-in": true, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            pass
+
+        fun()
+        self.assert_collects_metrics()
+
+    @mock.patch('metrics_utils.REPORT_BUILD', 'p/b/b/1')
+    def test_collects_metrics_report_build_set(self):
+        """Tests that metrics are collected when REPORT_BUILD is set."""
+        @self.collector.collect_metrics('fun')
+        def fun():
+            pass
+
+        fun()
+        self.assert_collects_metrics({
+            'bot_metrics': {
+                'build_id': 1,
+                'builder': {
+                    'project': 'p',
+                    'bucket': 'b',
+                    'builder': 'b',
+                }
+            }
+        })
+        # We shouldn't have tried to read the config file.
+        self.assertFalse(self.FileRead.called)
+
+    @mock.patch('metrics_utils.COLLECT_METRICS', False)
+    def test_metrics_collection_disabled(self):
+        """Tests that metrics collection can be disabled via a global variable."""
+        @self.collector.collect_metrics('fun')
+        def fun():
+            pass
+
+        fun()
+
+        self.assertFalse(self.collector.collecting_metrics)
+        # We shouldn't have tried to read the config file.
+        self.assertFalse(self.FileRead.called)
+        # Nor tried to upload any metrics.
+        self.assertFalse(self.Popen.called)
+
+    def test_metrics_collection_disabled_not_googler(self):
+        """Tests that metrics collection is disabled for non googlers."""
+        self.FileRead.side_effect = [
+            '{"is-googler": false, "countdown": 0, "opt-in": null, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            pass
+
+        fun()
+
+        self.assertFalse(self.collector.collecting_metrics)
+        self.assertFalse(self.collector.config.is_googler)
+        self.assertIsNone(self.collector.config.opted_in)
+        self.assertEqual(self.collector.config.countdown, 0)
+        # Assert that we did not try to upload any metrics.
+        self.assertFalse(self.Popen.called)
+
+    def test_metrics_collection_disabled_opted_out(self):
+        """Tests that metrics collection is disabled if the user opts out."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": false, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            pass
+
+        fun()
+
+        self.assertFalse(self.collector.collecting_metrics)
+        self.assertTrue(self.collector.config.is_googler)
+        self.assertFalse(self.collector.config.opted_in)
+        self.assertEqual(self.collector.config.countdown, 0)
+        # Assert that we did not try to upload any metrics.
+        self.assertFalse(self.Popen.called)
+
+    def test_metrics_collection_disabled_non_zero_countdown(self):
+        """Tests that metrics collection is disabled until the countdown expires."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 1, "opt-in": null, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            pass
+
+        fun()
+
+        self.assertFalse(self.collector.collecting_metrics)
+        self.assertTrue(self.collector.config.is_googler)
+        self.assertFalse(self.collector.config.opted_in)
+        self.assertEqual(self.collector.config.countdown, 1)
+        # Assert that we did not try to upload any metrics.
+        self.assertFalse(self.Popen.called)
+
+    def test_handles_exceptions(self):
+        """Tests that exceptions are caught and we exit with an appropriate code."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            raise ValueError
+
+        # When an exception is raised, we should catch it, update exit-code,
+        # collect metrics, and re-raise it.
+        with self.assertRaises(ValueError):
+            fun()
+        self.assert_collects_metrics({'exit_code': 1})
+
+    def test_handles_system_exit(self):
+        """Tests that the sys.exit code is respected and metrics are collected."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            sys.exit(0)
+
+        # When an exception is raised, we should catch it, update exit-code,
+        # collect metrics, and re-raise it.
+        with self.assertRaises(SystemExit) as cm:
+            fun()
+        self.assertEqual(cm.exception.code, 0)
+        self.assert_collects_metrics({'exit_code': 0})
+
+    def test_handles_keyboard_interrupt(self):
+        """Tests that KeyboardInterrupt exits with 130 and metrics are collected."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            raise KeyboardInterrupt
+
+        # When an exception is raised, we should catch it, update exit-code,
+        # collect metrics, and re-raise it.
+        with self.assertRaises(KeyboardInterrupt):
+            fun()
+        self.assert_collects_metrics({'exit_code': 130})
+
+    def test_handles_system_exit_non_zero(self):
+        """Tests that the sys.exit code is respected and metrics are collected."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            sys.exit(123)
+
+        # When an exception is raised, we should catch it, update exit-code,
+        # collect metrics, and re-raise it.
+        with self.assertRaises(SystemExit) as cm:
+            fun()
+        self.assertEqual(cm.exception.code, 123)
+        self.assert_collects_metrics({'exit_code': 123})
+
+    def test_prints_notice_non_zero_countdown(self):
+        """Tests that a notice is printed while the countdown is non-zero."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 1234, "opt-in": null, "version": 0}'
+        ]
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                pass
+        self.assertEqual(cm.exception.code, 0)
+        self.print_notice.assert_called_once_with(1234)
+
+    def test_prints_notice_zero_countdown(self):
+        """Tests that a notice is printed when the countdown reaches 0."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
+        ]
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                pass
+        self.assertEqual(cm.exception.code, 0)
+        self.print_notice.assert_called_once_with(0)
+
+    def test_doesnt_print_notice_opted_in(self):
+        """Tests that a notice is not printed when the user opts-in."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
+        ]
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                pass
+        self.assertEqual(cm.exception.code, 0)
+        self.assertFalse(self.print_notice.called)
+
+    def test_doesnt_print_notice_opted_out(self):
+        """Tests that a notice is not printed when the user opts-out."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": false, "version": 0}'
+        ]
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                pass
+        self.assertEqual(cm.exception.code, 0)
+        self.assertFalse(self.print_notice.called)
+
+    @mock.patch('metrics_utils.COLLECT_METRICS', False)
+    def test_doesnt_print_notice_disable_metrics_collection(self):
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                pass
+        self.assertEqual(cm.exception.code, 0)
+        self.assertFalse(self.print_notice.called)
+        # We shouldn't have tried to read the config file.
+        self.assertFalse(self.FileRead.called)
+
+    @mock.patch('metrics_utils.REPORT_BUILD', 'p/b/b/1')
+    def test_doesnt_print_notice_report_build(self):
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                pass
+        self.assertEqual(cm.exception.code, 0)
+        self.assertFalse(self.print_notice.called)
+        # We shouldn't have tried to read the config file.
+        self.assertFalse(self.FileRead.called)
+
+    def test_print_notice_handles_exceptions(self):
+        """Tests that exceptions are caught and we exit with an appropriate code."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
+        ]
+        # print_notice should catch the exception, print it and invoke
+        # sys.exit()
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                raise ValueError
+        self.assertEqual(cm.exception.code, 1)
+        self.assertTrue(self.print_notice.called)
+
+    def test_print_notice_handles_system_exit(self):
+        """Tests that the sys.exit code is respected and a notice is displayed."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
+        ]
+        # print_notice should catch the exception, print it and invoke
+        # sys.exit()
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                sys.exit(0)
+        self.assertEqual(cm.exception.code, 0)
+        self.assertTrue(self.print_notice.called)
+
+    def test_print_notice_handles_system_exit_non_zero(self):
+        """Tests that the sys.exit code is respected and a notice is displayed."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
+        ]
+        # When an exception is raised, we should catch it, update exit-code,
+        # collect metrics, and re-raise it.
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                sys.exit(123)
+        self.assertEqual(cm.exception.code, 123)
+        self.assertTrue(self.print_notice.called)
+
+    def test_counts_down(self):
+        """Tests that the countdown works correctly."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 10, "opt-in": null, "version": 0}'
+        ]
+
+        # We define multiple functions to ensure it has no impact on countdown.
+        @self.collector.collect_metrics('barn')
+        def _barn():
+            pass
+
+        @self.collector.collect_metrics('fun')
+        def _fun():
+            pass
+
+        def foo_main():
+            pass
+
+        # Assert that the countdown hasn't decreased yet.
+        self.assertFalse(self.FileWrite.called)
+        self.assertEqual(self.collector.config.countdown, 10)
+
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                foo_main()
+        self.assertEqual(cm.exception.code, 0)
+
+        # Assert that the countdown decreased by one, and the config file was
+        # updated.
+        self.assertEqual(self.collector.config.countdown, 9)
+        self.print_notice.assert_called_once_with(10)
+
+        self.assert_writes_file(self.config_file, {
+            'is-googler': True,
+            'countdown': 9,
+            'opt-in': None,
+            'version': 0
+        })
+
+    def test_nested_functions(self):
+        """Tests that a function can call another function for which metrics are
     collected."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
-    ]
-
-    @self.collector.collect_metrics('barn')
-    def barn():
-      self.collector.add('barn-metric', 1)
-      return 1000
-    @self.collector.collect_metrics('fun')
-    def fun():
-      result = barn()
-      self.collector.add('fun-metric', result + 1)
-
-    fun()
-
-    # Assert that we collected metrics for fun, but not for barn.
-    self.assert_collects_metrics({'fun-metric': 1001})
-
-  @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
-  def test_version_change_from_hasnt_decided(self):
-    # The user has not decided yet, and the countdown hasn't reached 0, so we're
-    # not collecting metrics.
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 9, "opt-in": null, "version": 0}'
-    ]
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        self.collector.add('foo-metric', 1)
-
-    self.assertEqual(cm.exception.code, 0)
-    # We display the notice informing the user of the changes.
-    self.print_version_change.assert_called_once_with(0)
-    # But the countdown is not reset.
-    self.assert_writes_file(
-        self.config_file,
-        {'is-googler': True, 'countdown': 8, 'opt-in': None, 'version': 0})
-    # And no metrics are uploaded.
-    self.assertFalse(self.Popen.called)
-
-  @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
-  def test_version_change_from_opted_in_by_default(self):
-    # The user has not decided yet, but the countdown has reached 0, and we're
-    # collecting metrics.
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
-    ]
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        self.collector.add('foo-metric', 1)
-
-    self.assertEqual(cm.exception.code, 0)
-    # We display the notice informing the user of the changes.
-    self.print_version_change.assert_called_once_with(0)
-    # We reset the countdown.
-    self.assert_writes_file(
-        self.config_file,
-        {'is-googler': True, 'countdown': 9, 'opt-in': None, 'version': 0})
-    # No metrics are uploaded.
-    self.assertFalse(self.Popen.called)
-
-  @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
-  def test_version_change_from_opted_in(self):
-    # The user has opted in, and we're collecting metrics.
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
-    ]
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        self.collector.add('foo-metric', 1)
-
-    self.assertEqual(cm.exception.code, 0)
-    # We display the notice informing the user of the changes.
-    self.print_version_change.assert_called_once_with(0)
-    # We reset the countdown.
-    self.assert_writes_file(
-        self.config_file,
-        {'is-googler': True, 'countdown': 9, 'opt-in': None, 'version': 0})
-    # No metrics are uploaded.
-    self.assertFalse(self.Popen.called)
-
-  @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
-  def test_version_change_from_opted_out(self):
-    # The user has opted out and we're not collecting metrics.
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": false, "version": 0}'
-    ]
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        self.collector.add('foo-metric', 1)
-
-    self.assertEqual(cm.exception.code, 0)
-    # We don't display any notice.
-    self.assertFalse(self.print_version_change.called)
-    self.assertFalse(self.print_notice.called)
-    # We don't upload any metrics.
-    self.assertFalse(self.Popen.called)
-    # We don't modify the config.
-    self.assertFalse(self.FileWrite.called)
-
-  @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
-  def test_version_change_non_googler(self):
-    # The user is not a googler and we're not collecting metrics.
-    self.FileRead.side_effect = [
-        '{"is-googler": false, "countdown": 10, "opt-in": null, "version": 0}'
-    ]
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        self.collector.add('foo-metric', 1)
-
-    self.assertEqual(cm.exception.code, 0)
-    # We don't display any notice.
-    self.assertFalse(self.print_version_change.called)
-    self.assertFalse(self.print_notice.called)
-    # We don't upload any metrics.
-    self.assertFalse(self.Popen.called)
-    # We don't modify the config.
-    self.assertFalse(self.FileWrite.called)
-
-  @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
-  def test_opting_in_updates_version(self):
-    # The user is seeing the notice telling him of the version changes.
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 8, "opt-in": null, "version": 0}'
-    ]
-
-    self.collector.config.opted_in = True
-
-    # We don't display any notice.
-    self.assertFalse(self.print_version_change.called)
-    self.assertFalse(self.print_notice.called)
-    # We don't upload any metrics.
-    self.assertFalse(self.Popen.called)
-    # We update the version and opt-in the user.
-    self.assert_writes_file(
-        self.config_file,
-        {'is-googler': True, 'countdown': 8, 'opt-in': True, 'version': 5})
-
-  @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
-  def test_opting_in_by_default_updates_version(self):
-    # The user will be opted in by default on the next execution.
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 1, "opt-in": null, "version": 0}'
-    ]
-
-    with self.assertRaises(SystemExit) as cm:
-      with self.collector.print_notice_and_exit():
-        self.collector.add('foo-metric', 1)
-
-    self.assertEqual(cm.exception.code, 0)
-    # We display the notices.
-    self.print_notice.assert_called_once_with(1)
-    self.print_version_change.assert_called_once_with(0)
-    # We don't upload any metrics.
-    self.assertFalse(self.Popen.called)
-    # We update the version and set the countdown to 0. In subsequent runs,
-    # we'll start collecting metrics.
-    self.assert_writes_file(
-        self.config_file,
-        {'is-googler': True, 'countdown': 0, 'opt-in': None, 'version': 5})
-
-  def test_add_repeated(self):
-    """Tests that we can add repeated metrics."""
-    self.FileRead.side_effect = [
-        '{"is-googler": true, "countdown": 0, "opt-in": true}'
-    ]
-
-    @self.collector.collect_metrics('fun')
-    def fun():
-      self.collector.add_repeated('fun', 1)
-      self.collector.add_repeated('fun', 2)
-      self.collector.add_repeated('fun', 5)
-
-    fun()
-
-    # Assert that we collected all metrics for fun.
-    self.assert_collects_metrics({'fun': [1, 2, 5]})
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
+        ]
+
+        @self.collector.collect_metrics('barn')
+        def barn():
+            self.collector.add('barn-metric', 1)
+            return 1000
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            result = barn()
+            self.collector.add('fun-metric', result + 1)
+
+        fun()
+
+        # Assert that we collected metrics for fun, but not for barn.
+        self.assert_collects_metrics({'fun-metric': 1001})
+
+    @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
+    def test_version_change_from_hasnt_decided(self):
+        # The user has not decided yet, and the countdown hasn't reached 0, so
+        # we're not collecting metrics.
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 9, "opt-in": null, "version": 0}'
+        ]
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                self.collector.add('foo-metric', 1)
+
+        self.assertEqual(cm.exception.code, 0)
+        # We display the notice informing the user of the changes.
+        self.print_version_change.assert_called_once_with(0)
+        # But the countdown is not reset.
+        self.assert_writes_file(self.config_file, {
+            'is-googler': True,
+            'countdown': 8,
+            'opt-in': None,
+            'version': 0
+        })
+        # And no metrics are uploaded.
+        self.assertFalse(self.Popen.called)
+
+    @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
+    def test_version_change_from_opted_in_by_default(self):
+        # The user has not decided yet, but the countdown has reached 0, and
+        # we're collecting metrics.
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": null, "version": 0}'
+        ]
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                self.collector.add('foo-metric', 1)
+
+        self.assertEqual(cm.exception.code, 0)
+        # We display the notice informing the user of the changes.
+        self.print_version_change.assert_called_once_with(0)
+        # We reset the countdown.
+        self.assert_writes_file(self.config_file, {
+            'is-googler': True,
+            'countdown': 9,
+            'opt-in': None,
+            'version': 0
+        })
+        # No metrics are uploaded.
+        self.assertFalse(self.Popen.called)
+
+    @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
+    def test_version_change_from_opted_in(self):
+        # The user has opted in, and we're collecting metrics.
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": true, "version": 0}'
+        ]
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                self.collector.add('foo-metric', 1)
+
+        self.assertEqual(cm.exception.code, 0)
+        # We display the notice informing the user of the changes.
+        self.print_version_change.assert_called_once_with(0)
+        # We reset the countdown.
+        self.assert_writes_file(self.config_file, {
+            'is-googler': True,
+            'countdown': 9,
+            'opt-in': None,
+            'version': 0
+        })
+        # No metrics are uploaded.
+        self.assertFalse(self.Popen.called)
+
+    @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
+    def test_version_change_from_opted_out(self):
+        # The user has opted out and we're not collecting metrics.
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": false, "version": 0}'
+        ]
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                self.collector.add('foo-metric', 1)
+
+        self.assertEqual(cm.exception.code, 0)
+        # We don't display any notice.
+        self.assertFalse(self.print_version_change.called)
+        self.assertFalse(self.print_notice.called)
+        # We don't upload any metrics.
+        self.assertFalse(self.Popen.called)
+        # We don't modify the config.
+        self.assertFalse(self.FileWrite.called)
+
+    @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
+    def test_version_change_non_googler(self):
+        # The user is not a googler and we're not collecting metrics.
+        self.FileRead.side_effect = [
+            '{"is-googler": false, "countdown": 10, "opt-in": null, "version": 0}'
+        ]
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                self.collector.add('foo-metric', 1)
+
+        self.assertEqual(cm.exception.code, 0)
+        # We don't display any notice.
+        self.assertFalse(self.print_version_change.called)
+        self.assertFalse(self.print_notice.called)
+        # We don't upload any metrics.
+        self.assertFalse(self.Popen.called)
+        # We don't modify the config.
+        self.assertFalse(self.FileWrite.called)
+
+    @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
+    def test_opting_in_updates_version(self):
+        # The user is seeing the notice telling him of the version changes.
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 8, "opt-in": null, "version": 0}'
+        ]
+
+        self.collector.config.opted_in = True
+
+        # We don't display any notice.
+        self.assertFalse(self.print_version_change.called)
+        self.assertFalse(self.print_notice.called)
+        # We don't upload any metrics.
+        self.assertFalse(self.Popen.called)
+        # We update the version and opt-in the user.
+        self.assert_writes_file(self.config_file, {
+            'is-googler': True,
+            'countdown': 8,
+            'opt-in': True,
+            'version': 5
+        })
+
+    @mock.patch('metrics.metrics_utils.CURRENT_VERSION', 5)
+    def test_opting_in_by_default_updates_version(self):
+        # The user will be opted in by default on the next execution.
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 1, "opt-in": null, "version": 0}'
+        ]
+
+        with self.assertRaises(SystemExit) as cm:
+            with self.collector.print_notice_and_exit():
+                self.collector.add('foo-metric', 1)
+
+        self.assertEqual(cm.exception.code, 0)
+        # We display the notices.
+        self.print_notice.assert_called_once_with(1)
+        self.print_version_change.assert_called_once_with(0)
+        # We don't upload any metrics.
+        self.assertFalse(self.Popen.called)
+        # We update the version and set the countdown to 0. In subsequent runs,
+        # we'll start collecting metrics.
+        self.assert_writes_file(self.config_file, {
+            'is-googler': True,
+            'countdown': 0,
+            'opt-in': None,
+            'version': 5
+        })
+
+    def test_add_repeated(self):
+        """Tests that we can add repeated metrics."""
+        self.FileRead.side_effect = [
+            '{"is-googler": true, "countdown": 0, "opt-in": true}'
+        ]
+
+        @self.collector.collect_metrics('fun')
+        def fun():
+            self.collector.add_repeated('fun', 1)
+            self.collector.add_repeated('fun', 2)
+            self.collector.add_repeated('fun', 5)
+
+        fun()
+
+        # Assert that we collected all metrics for fun.
+        self.assert_collects_metrics({'fun': [1, 2, 5]})
 
 
 class MetricsUtilsTest(unittest.TestCase):
-
-  def test_extracts_host(self):
-    """Test that we extract the host from the requested URI."""
-    # Regular case
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://chromium-review.googlesource.com/foo/bar?q=baz', '', 0, 0)
-    self.assertEqual('chromium-review.googlesource.com', http_metrics['host'])
-
-    # Unexpected host
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://foo-review.googlesource.com/', '', 0, 0)
-    self.assertNotIn('host', http_metrics)
-
-  def test_extracts_path(self):
-    """Test that we extract the matching path from the requested URI."""
-    # Regular case
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://review.example.com/changes/1234/revisions/deadbeef/commit',
-        '', 0, 0)
-    self.assertEqual('changes/revisions/commit', http_metrics['path'])
-
-    # No matching paths
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://review.example.com/changes/1234/unexpected/path', '', 0, 0)
-    self.assertNotIn('path', http_metrics)
-
-  def test_extracts_path_changes(self):
-    """Tests that we extract paths for /changes/."""
-    # /changes/<change-id>
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://review.example.com/changes/proj%2Fsrc%7Emain%7EI1234abcd',
-        '', 0, 0)
-    self.assertEqual('changes', http_metrics['path'])
-
-    # /changes/?q=<something>
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://review.example.com/changes/?q=owner:me+OR+cc:me',
-        '', 0, 0)
-    self.assertEqual('changes', http_metrics['path'])
-
-    # /changes/#<something>
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://review.example.com/changes/#something',
-        '', 0, 0)
-    self.assertEqual('changes', http_metrics['path'])
-
-    # /changes/<change-id>/<anything> does not map to changes.
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://review.example.com/changes/12345678/message',
-        '', 0, 0)
-    self.assertNotEqual('changes', http_metrics['path'])
-
-  def test_extracts_arguments(self):
-    """Test that we can extract arguments from the requested URI."""
-    # Regular case
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://review.example.com/?q=123&foo=bar&o=ALL_REVISIONS', '', 0, 0)
-    self.assertEqual(['ALL_REVISIONS'], http_metrics['arguments'])
-
-    # Some unexpected arguments are filtered out.
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://review.example.com/?o=ALL_REVISIONS&o=LABELS&o=UNEXPECTED',
-        '', 0, 0)
-    self.assertEqual(['ALL_REVISIONS', 'LABELS'], http_metrics['arguments'])
-
-    # No valid arguments, so arguments is not present
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://review.example.com/?o=bar&baz=1', '', 0, 0)
-    self.assertNotIn('arguments', http_metrics)
-
-    # No valid arguments, so arguments is not present
-    http_metrics = metrics_utils.extract_http_metrics(
-        'https://review.example.com/?foo=bar&baz=1', '', 0, 0)
-    self.assertNotIn('arguments', http_metrics)
-
-  def test_validates_method(self):
-    """Test that we validate the HTTP method used."""
-    # Regular case
-    http_metrics = metrics_utils.extract_http_metrics('', 'POST', 0, 0)
-    self.assertEqual('POST', http_metrics['method'])
-
-    # Unexpected method is not reported
-    http_metrics = metrics_utils.extract_http_metrics('', 'DEMAND', 0, 0)
-    self.assertNotIn('method', http_metrics)
-
-  def test_status(self):
-    """Tests that the response status we passed is returned."""
-    http_metrics = metrics_utils.extract_http_metrics('', '', 123, 0)
-    self.assertEqual(123, http_metrics['status'])
-
-    http_metrics = metrics_utils.extract_http_metrics('', '', 404, 0)
-    self.assertEqual(404, http_metrics['status'])
-
-  def test_response_time(self):
-    """Tests that the response time we passed is returned."""
-    http_metrics = metrics_utils.extract_http_metrics('', '', 0, 0.25)
-    self.assertEqual(0.25, http_metrics['response_time'])
-
-    http_metrics = metrics_utils.extract_http_metrics('', '', 0, 12345.25)
-    self.assertEqual(12345.25, http_metrics['response_time'])
-
-  @mock.patch('metrics_utils.subprocess2.Popen')
-  def test_get_git_version(self, mockPopen):
-    """Tests that we can get the git version."""
-    mockProcess = mock.Mock()
-    mockProcess.communicate.side_effect = [(b'git version 2.18.0.123.foo', '')]
-    mockPopen.side_effect = [mockProcess]
-
-    self.assertEqual('2.18.0', metrics_utils.get_git_version())
-
-  @mock.patch('metrics_utils.subprocess2.Popen')
-  def test_get_git_version_unrecognized(self, mockPopen):
-    """Tests that we can get the git version."""
-    mockProcess = mock.Mock()
-    mockProcess.communicate.side_effect = [(b'Blah blah blah', 'blah blah')]
-    mockPopen.side_effect = [mockProcess]
-
-    self.assertIsNone(metrics_utils.get_git_version())
-
-  def test_extract_known_subcommand_args(self):
-    """Tests that we can extract known subcommand args."""
-    result = metrics_utils.extract_known_subcommand_args([
-      'm=Fix issue with ccs', 'cc=foo@example.com', 'cc=bar@example.com'])
-    self.assertEqual(['cc', 'cc', 'm'], result)
-
-    result = metrics_utils.extract_known_subcommand_args([
-      'm=Some title mentioning cc and hashtag', 'notify=NONE', 'private'])
-    self.assertEqual(['m', 'notify=NONE', 'private'], result)
-
-    result = metrics_utils.extract_known_subcommand_args([
-      'foo=bar', 'another_unkwnon_arg'])
-    self.assertEqual([], result)
+    def test_extracts_host(self):
+        """Test that we extract the host from the requested URI."""
+        # Regular case
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://chromium-review.googlesource.com/foo/bar?q=baz', '', 0, 0)
+        self.assertEqual('chromium-review.googlesource.com',
+                         http_metrics['host'])
+
+        # Unexpected host
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://foo-review.googlesource.com/', '', 0, 0)
+        self.assertNotIn('host', http_metrics)
+
+    def test_extracts_path(self):
+        """Test that we extract the matching path from the requested URI."""
+        # Regular case
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://review.example.com/changes/1234/revisions/deadbeef/commit',
+            '', 0, 0)
+        self.assertEqual('changes/revisions/commit', http_metrics['path'])
+
+        # No matching paths
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://review.example.com/changes/1234/unexpected/path', '', 0, 0)
+        self.assertNotIn('path', http_metrics)
+
+    def test_extracts_path_changes(self):
+        """Tests that we extract paths for /changes/."""
+        # /changes/<change-id>
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://review.example.com/changes/proj%2Fsrc%7Emain%7EI1234abcd',
+            '', 0, 0)
+        self.assertEqual('changes', http_metrics['path'])
+
+        # /changes/?q=<something>
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://review.example.com/changes/?q=owner:me+OR+cc:me', '', 0, 0)
+        self.assertEqual('changes', http_metrics['path'])
+
+        # /changes/#<something>
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://review.example.com/changes/#something', '', 0, 0)
+        self.assertEqual('changes', http_metrics['path'])
+
+        # /changes/<change-id>/<anything> does not map to changes.
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://review.example.com/changes/12345678/message', '', 0, 0)
+        self.assertNotEqual('changes', http_metrics['path'])
+
+    def test_extracts_arguments(self):
+        """Test that we can extract arguments from the requested URI."""
+        # Regular case
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://review.example.com/?q=123&foo=bar&o=ALL_REVISIONS', '', 0,
+            0)
+        self.assertEqual(['ALL_REVISIONS'], http_metrics['arguments'])
+
+        # Some unexpected arguments are filtered out.
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://review.example.com/?o=ALL_REVISIONS&o=LABELS&o=UNEXPECTED',
+            '', 0, 0)
+        self.assertEqual(['ALL_REVISIONS', 'LABELS'], http_metrics['arguments'])
+
+        # No valid arguments, so arguments is not present
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://review.example.com/?o=bar&baz=1', '', 0, 0)
+        self.assertNotIn('arguments', http_metrics)
+
+        # No valid arguments, so arguments is not present
+        http_metrics = metrics_utils.extract_http_metrics(
+            'https://review.example.com/?foo=bar&baz=1', '', 0, 0)
+        self.assertNotIn('arguments', http_metrics)
+
+    def test_validates_method(self):
+        """Test that we validate the HTTP method used."""
+        # Regular case
+        http_metrics = metrics_utils.extract_http_metrics('', 'POST', 0, 0)
+        self.assertEqual('POST', http_metrics['method'])
+
+        # Unexpected method is not reported
+        http_metrics = metrics_utils.extract_http_metrics('', 'DEMAND', 0, 0)
+        self.assertNotIn('method', http_metrics)
+
+    def test_status(self):
+        """Tests that the response status we passed is returned."""
+        http_metrics = metrics_utils.extract_http_metrics('', '', 123, 0)
+        self.assertEqual(123, http_metrics['status'])
+
+        http_metrics = metrics_utils.extract_http_metrics('', '', 404, 0)
+        self.assertEqual(404, http_metrics['status'])
+
+    def test_response_time(self):
+        """Tests that the response time we passed is returned."""
+        http_metrics = metrics_utils.extract_http_metrics('', '', 0, 0.25)
+        self.assertEqual(0.25, http_metrics['response_time'])
+
+        http_metrics = metrics_utils.extract_http_metrics('', '', 0, 12345.25)
+        self.assertEqual(12345.25, http_metrics['response_time'])
+
+    @mock.patch('metrics_utils.subprocess2.Popen')
+    def test_get_git_version(self, mockPopen):
+        """Tests that we can get the git version."""
+        mockProcess = mock.Mock()
+        mockProcess.communicate.side_effect = [(b'git version 2.18.0.123.foo',
+                                                '')]
+        mockPopen.side_effect = [mockProcess]
+
+        self.assertEqual('2.18.0', metrics_utils.get_git_version())
+
+    @mock.patch('metrics_utils.subprocess2.Popen')
+    def test_get_git_version_unrecognized(self, mockPopen):
+        """Tests that we can get the git version."""
+        mockProcess = mock.Mock()
+        mockProcess.communicate.side_effect = [(b'Blah blah blah', 'blah blah')]
+        mockPopen.side_effect = [mockProcess]
+
+        self.assertIsNone(metrics_utils.get_git_version())
+
+    def test_extract_known_subcommand_args(self):
+        """Tests that we can extract known subcommand args."""
+        result = metrics_utils.extract_known_subcommand_args([
+            'm=Fix issue with ccs', 'cc=foo@example.com', 'cc=bar@example.com'
+        ])
+        self.assertEqual(['cc', 'cc', 'm'], result)
+
+        result = metrics_utils.extract_known_subcommand_args([
+            'm=Some title mentioning cc and hashtag', 'notify=NONE', 'private'
+        ])
+        self.assertEqual(['m', 'notify=NONE', 'private'], result)
+
+        result = metrics_utils.extract_known_subcommand_args(
+            ['foo=bar', 'another_unkwnon_arg'])
+        self.assertEqual([], result)
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 70 - 70
tests/my_activity_test.py

@@ -14,81 +14,81 @@ sys.path.insert(0, DEPOT_TOOLS_ROOT)
 
 import my_activity
 
+
 class MyActivityTest(unittest.TestCase):
-  def test_datetime_to_midnight(self):
-    self.assertEqual(
-        datetime(2020, 9, 12),
-        my_activity.datetime_to_midnight(datetime(2020, 9, 12, 13, 0, 0)))
-    self.assertEqual(
-        datetime(2020, 12, 31),
-        my_activity.datetime_to_midnight(datetime(2020, 12, 31, 23, 59, 59)))
-    self.assertEqual(
-        datetime(2020, 12, 31),
-        my_activity.datetime_to_midnight(datetime(2020, 12, 31)))
+    def test_datetime_to_midnight(self):
+        self.assertEqual(
+            datetime(2020, 9, 12),
+            my_activity.datetime_to_midnight(datetime(2020, 9, 12, 13, 0, 0)))
+        self.assertEqual(
+            datetime(2020, 12, 31),
+            my_activity.datetime_to_midnight(datetime(2020, 12, 31, 23, 59,
+                                                      59)))
+        self.assertEqual(
+            datetime(2020, 12, 31),
+            my_activity.datetime_to_midnight(datetime(2020, 12, 31)))
+
+    def test_get_quarter_of(self):
+        self.assertEqual((datetime(2020, 7, 1), datetime(2020, 10, 1)),
+                         my_activity.get_quarter_of(datetime(2020, 9, 12)))
+        # Quarter range includes beginning
+        self.assertEqual((datetime(2020, 10, 1), datetime(2021, 1, 1)),
+                         my_activity.get_quarter_of(datetime(2020, 10, 1)))
+        # Quarter range excludes end
+        self.assertEqual((datetime(2021, 1, 1), datetime(2021, 4, 1)),
+                         my_activity.get_quarter_of(datetime(2021, 1, 1)))
+        self.assertEqual(
+            (datetime(2020, 10, 1), datetime(2021, 1, 1)),
+            my_activity.get_quarter_of(datetime(2020, 12, 31, 23, 59, 59)))
 
-  def test_get_quarter_of(self):
-    self.assertEqual(
-        (datetime(2020, 7, 1), datetime(2020, 10, 1)),
-        my_activity.get_quarter_of(datetime(2020, 9, 12)))
-    # Quarter range includes beginning
-    self.assertEqual(
-        (datetime(2020, 10, 1), datetime(2021, 1, 1)),
-        my_activity.get_quarter_of(datetime(2020, 10, 1)))
-    # Quarter range excludes end
-    self.assertEqual(
-        (datetime(2021, 1, 1), datetime(2021, 4, 1)),
-        my_activity.get_quarter_of(datetime(2021, 1, 1)))
-    self.assertEqual(
-        (datetime(2020, 10, 1), datetime(2021, 1, 1)),
-        my_activity.get_quarter_of(datetime(2020, 12, 31, 23, 59, 59)))
+    def test_get_year_of(self):
+        self.assertEqual((datetime(2020, 1, 1), datetime(2021, 1, 1)),
+                         my_activity.get_year_of(datetime(2020, 9, 12)))
+        # Year range includes beginning
+        self.assertEqual((datetime(2020, 1, 1), datetime(2021, 1, 1)),
+                         my_activity.get_year_of(datetime(2020, 1, 1)))
+        # Year range excludes end
+        self.assertEqual((datetime(2021, 1, 1), datetime(2022, 1, 1)),
+                         my_activity.get_year_of(datetime(2021, 1, 1)))
 
-  def test_get_year_of(self):
-    self.assertEqual(
-        (datetime(2020, 1, 1), datetime(2021, 1, 1)),
-        my_activity.get_year_of(datetime(2020, 9, 12)))
-    # Year range includes beginning
-    self.assertEqual(
-        (datetime(2020, 1, 1), datetime(2021, 1, 1)),
-        my_activity.get_year_of(datetime(2020, 1, 1)))
-    # Year range excludes end
-    self.assertEqual(
-        (datetime(2021, 1, 1), datetime(2022, 1, 1)),
-        my_activity.get_year_of(datetime(2021, 1, 1)))
+    def test_get_week_of(self):
+        self.assertEqual((datetime(2020, 9, 7), datetime(2020, 9, 14)),
+                         my_activity.get_week_of(datetime(2020, 9, 12)))
+        # Week range includes beginning
+        self.assertEqual((datetime(2020, 9, 7), datetime(2020, 9, 14)),
+                         my_activity.get_week_of(datetime(2020, 9, 7)))
+        # Week range excludes beginning
+        self.assertEqual((datetime(2020, 9, 14), datetime(2020, 9, 21)),
+                         my_activity.get_week_of(datetime(2020, 9, 14)))
 
-  def test_get_week_of(self):
-    self.assertEqual(
-        (datetime(2020, 9, 7), datetime(2020, 9, 14)),
-        my_activity.get_week_of(datetime(2020, 9, 12)))
-    # Week range includes beginning
-    self.assertEqual(
-        (datetime(2020, 9, 7), datetime(2020, 9, 14)),
-        my_activity.get_week_of(datetime(2020, 9, 7)))
-    # Week range excludes beginning
-    self.assertEqual(
-        (datetime(2020, 9, 14), datetime(2020, 9, 21)),
-        my_activity.get_week_of(datetime(2020, 9, 14)))
+    def _get_issue_with_description(self, description):
+        return {
+            'current_revision': 'rev',
+            'revisions': {
+                'rev': {
+                    'commit': {
+                        'message': description
+                    }
+                }
+            },
+        }
 
-  def _get_issue_with_description(self, description):
-    return {
-      'current_revision': 'rev',
-      'revisions': {'rev': {'commit': {'message': description}}},
-    }
+    def test_extract_bug_numbers_from_description(self):
+        issue = self._get_issue_with_description(
+            'Title\n'
+            '\n'
+            'Description\n'
+            'A comment:\n'
+            '> Bug: 1234, another:5678\n'
+            '\n'
+            'Bug: another:1234, 5678\n'
+            'BUG=project:13141516\n'
+            'Fixed: fixed:9101112\n'
+            'Change-Id: Iabcdef1234567890\n')
+        self.assertEqual([
+            'another:1234', 'chromium:5678', 'fixed:9101112', 'project:13141516'
+        ], my_activity.extract_bug_numbers_from_description(issue))
 
-  def test_extract_bug_numbers_from_description(self):
-    issue = self._get_issue_with_description(
-        'Title\n'
-        '\n'
-        'Description\n'
-        'A comment:\n'
-        '> Bug: 1234, another:5678\n'
-        '\n'
-        'Bug: another:1234, 5678\n'
-        'BUG=project:13141516\n'
-        'Fixed: fixed:9101112\n'
-        'Change-Id: Iabcdef1234567890\n')
-    self.assertEqual(
-        ['another:1234', 'chromium:5678', 'fixed:9101112', 'project:13141516'],
-        my_activity.extract_bug_numbers_from_description(issue))
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 271 - 258
tests/ninja_reclient_test.py

@@ -19,275 +19,288 @@ from testing_support import trial_dir
 
 
 def write(filename, content):
-  """Writes the content of a file and create the directories as needed."""
-  filename = os.path.abspath(filename)
-  dirname = os.path.dirname(filename)
-  if not os.path.isdir(dirname):
-    os.makedirs(dirname)
-  with open(filename, 'w') as f:
-    f.write(content)
+    """Writes the content of a file and create the directories as needed."""
+    filename = os.path.abspath(filename)
+    dirname = os.path.dirname(filename)
+    if not os.path.isdir(dirname):
+        os.makedirs(dirname)
+    with open(filename, 'w') as f:
+        f.write(content)
 
 
 class NinjaReclientTest(trial_dir.TestCase):
-  def setUp(self):
-    super(NinjaReclientTest, self).setUp()
-    self.previous_dir = os.getcwd()
-    os.chdir(self.root_dir)
-
-  def tearDown(self):
-    os.chdir(self.previous_dir)
-    super(NinjaReclientTest, self).tearDown()
-
-  @unittest.mock.patch.dict(os.environ, {})
-  @unittest.mock.patch('subprocess.call', return_value=0)
-  @unittest.mock.patch('ninja.main', return_value=0)
-  @unittest.mock.patch('reclient_metrics.check_status', return_value=True)
-  def test_ninja_reclient_collect_metrics(self, mock_metrics_status, mock_ninja,
-                                          mock_call):
-    reclient_bin_dir = os.path.join('src', 'buildtools', 'reclient')
-    reclient_cfg = os.path.join('src', 'buildtools', 'reclient_cfgs',
-                                'reproxy.cfg')
-    write('.gclient', '')
-    write('.gclient_entries', 'entries = {"buildtools": "..."}')
-    write(os.path.join(reclient_bin_dir, 'version.txt'), '0.0')
-    write(reclient_cfg, '0.0')
-    argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
-
-    self.assertEqual(0, ninja_reclient.main(argv))
-
-    self.assertTrue(
-        os.path.isdir(os.path.join(self.root_dir, "out", "a", ".reproxy_tmp")))
-    self.assertTrue(
-        os.path.isdir(
+    def setUp(self):
+        super(NinjaReclientTest, self).setUp()
+        self.previous_dir = os.getcwd()
+        os.chdir(self.root_dir)
+
+    def tearDown(self):
+        os.chdir(self.previous_dir)
+        super(NinjaReclientTest, self).tearDown()
+
+    @unittest.mock.patch.dict(os.environ, {})
+    @unittest.mock.patch('subprocess.call', return_value=0)
+    @unittest.mock.patch('ninja.main', return_value=0)
+    @unittest.mock.patch('reclient_metrics.check_status', return_value=True)
+    def test_ninja_reclient_collect_metrics(self, mock_metrics_status,
+                                            mock_ninja, mock_call):
+        reclient_bin_dir = os.path.join('src', 'buildtools', 'reclient')
+        reclient_cfg = os.path.join('src', 'buildtools', 'reclient_cfgs',
+                                    'reproxy.cfg')
+        write('.gclient', '')
+        write('.gclient_entries', 'entries = {"buildtools": "..."}')
+        write(os.path.join(reclient_bin_dir, 'version.txt'), '0.0')
+        write(reclient_cfg, '0.0')
+        argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
+
+        self.assertEqual(0, ninja_reclient.main(argv))
+
+        self.assertTrue(
+            os.path.isdir(
+                os.path.join(self.root_dir, "out", "a", ".reproxy_tmp")))
+        self.assertTrue(
+            os.path.isdir(
+                os.path.join(
+                    self.root_dir, ".reproxy_cache",
+                    hashlib.md5(
+                        os.path.join(self.root_dir, "out", "a",
+                                     ".reproxy_tmp").encode()).hexdigest())))
+        self.assertTrue(
+            os.path.isdir(
+                os.path.join(self.root_dir, "out", "a", ".reproxy_tmp",
+                             "logs")))
+        self.assertEqual(
+            os.environ.get('RBE_output_dir'),
+            os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs"))
+        self.assertEqual(
+            os.environ.get('RBE_proxy_log_dir'),
+            os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs"))
+        self.assertEqual(
+            os.environ.get('RBE_cache_dir'),
             os.path.join(
                 self.root_dir, ".reproxy_cache",
                 hashlib.md5(
                     os.path.join(self.root_dir, "out", "a",
-                                 ".reproxy_tmp").encode()).hexdigest())))
-    self.assertTrue(
-        os.path.isdir(
-            os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs")))
-    self.assertEqual(
-        os.environ.get('RBE_output_dir'),
-        os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs"))
-    self.assertEqual(
-        os.environ.get('RBE_proxy_log_dir'),
-        os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs"))
-    self.assertEqual(
-        os.environ.get('RBE_cache_dir'),
-        os.path.join(
-            self.root_dir, ".reproxy_cache",
-            hashlib.md5(
-                os.path.join(self.root_dir, "out", "a",
-                             ".reproxy_tmp").encode()).hexdigest()))
-    if sys.platform.startswith('win'):
-      self.assertEqual(
-          os.environ.get('RBE_server_address'),
-          "pipe://%s/reproxy.pipe" % hashlib.md5(
-              os.path.join(self.root_dir, "out", "a",
-                           ".reproxy_tmp").encode()).hexdigest())
-    else:
-      self.assertEqual(
-          os.environ.get('RBE_server_address'),
-          "unix:///tmp/reproxy_%s.sock" % hashlib.sha256(
-              os.path.join(self.root_dir, "out", "a",
-                           ".reproxy_tmp").encode()).hexdigest())
-
-    self.assertEqual(os.environ.get('RBE_metrics_project'),
-                     "chromium-reclient-metrics")
-    self.assertEqual(os.environ.get('RBE_metrics_table'), "rbe_metrics.builds")
-    self.assertEqual(os.environ.get('RBE_metrics_labels'),
-                     "source=developer,tool=ninja_reclient")
-    self.assertEqual(os.environ.get('RBE_metrics_prefix'), "go.chromium.org")
-
-    mock_metrics_status.assert_called_once_with("out/a")
-    mock_ninja.assert_called_once_with(argv)
-    mock_call.assert_has_calls([
-        unittest.mock.call([
-            os.path.join(self.root_dir, reclient_bin_dir,
-                         'bootstrap' + gclient_paths.GetExeSuffix()),
-            "--re_proxy=" +
-            os.path.join(self.root_dir, reclient_bin_dir,
-                         'reproxy' + gclient_paths.GetExeSuffix()),
-            "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
-        ]),
-        unittest.mock.call([
-            os.path.join(self.root_dir, reclient_bin_dir,
-                         'bootstrap' + gclient_paths.GetExeSuffix()),
-            "--shutdown", "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
-        ]),
-    ])
-
-  @unittest.mock.patch.dict(os.environ, {})
-  @unittest.mock.patch('subprocess.call', return_value=0)
-  @unittest.mock.patch('ninja.main', return_value=0)
-  @unittest.mock.patch('reclient_metrics.check_status', return_value=False)
-  def test_ninja_reclient_do_not_collect_metrics(self, mock_metrics_status,
-                                                 mock_ninja, mock_call):
-    reclient_bin_dir = os.path.join('src', 'buildtools', 'reclient')
-    reclient_cfg = os.path.join('src', 'buildtools', 'reclient_cfgs',
-                                'reproxy.cfg')
-    write('.gclient', '')
-    write('.gclient_entries', 'entries = {"buildtools": "..."}')
-    write(os.path.join(reclient_bin_dir, 'version.txt'), '0.0')
-    write(reclient_cfg, '0.0')
-    argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
-
-    self.assertEqual(0, ninja_reclient.main(argv))
-
-    self.assertTrue(
-        os.path.isdir(os.path.join(self.root_dir, "out", "a", ".reproxy_tmp")))
-    self.assertTrue(
-        os.path.isdir(
+                                 ".reproxy_tmp").encode()).hexdigest()))
+        if sys.platform.startswith('win'):
+            self.assertEqual(
+                os.environ.get('RBE_server_address'),
+                "pipe://%s/reproxy.pipe" % hashlib.md5(
+                    os.path.join(self.root_dir, "out", "a",
+                                 ".reproxy_tmp").encode()).hexdigest())
+        else:
+            self.assertEqual(
+                os.environ.get('RBE_server_address'),
+                "unix:///tmp/reproxy_%s.sock" % hashlib.sha256(
+                    os.path.join(self.root_dir, "out", "a",
+                                 ".reproxy_tmp").encode()).hexdigest())
+
+        self.assertEqual(os.environ.get('RBE_metrics_project'),
+                         "chromium-reclient-metrics")
+        self.assertEqual(os.environ.get('RBE_metrics_table'),
+                         "rbe_metrics.builds")
+        self.assertEqual(os.environ.get('RBE_metrics_labels'),
+                         "source=developer,tool=ninja_reclient")
+        self.assertEqual(os.environ.get('RBE_metrics_prefix'),
+                         "go.chromium.org")
+
+        mock_metrics_status.assert_called_once_with("out/a")
+        mock_ninja.assert_called_once_with(argv)
+        mock_call.assert_has_calls([
+            unittest.mock.call([
+                os.path.join(self.root_dir, reclient_bin_dir,
+                             'bootstrap' + gclient_paths.GetExeSuffix()),
+                "--re_proxy=" +
+                os.path.join(self.root_dir, reclient_bin_dir,
+                             'reproxy' + gclient_paths.GetExeSuffix()),
+                "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
+            ]),
+            unittest.mock.call([
+                os.path.join(self.root_dir, reclient_bin_dir,
+                             'bootstrap' + gclient_paths.GetExeSuffix()),
+                "--shutdown",
+                "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
+            ]),
+        ])
+
+    @unittest.mock.patch.dict(os.environ, {})
+    @unittest.mock.patch('subprocess.call', return_value=0)
+    @unittest.mock.patch('ninja.main', return_value=0)
+    @unittest.mock.patch('reclient_metrics.check_status', return_value=False)
+    def test_ninja_reclient_do_not_collect_metrics(self, mock_metrics_status,
+                                                   mock_ninja, mock_call):
+        reclient_bin_dir = os.path.join('src', 'buildtools', 'reclient')
+        reclient_cfg = os.path.join('src', 'buildtools', 'reclient_cfgs',
+                                    'reproxy.cfg')
+        write('.gclient', '')
+        write('.gclient_entries', 'entries = {"buildtools": "..."}')
+        write(os.path.join(reclient_bin_dir, 'version.txt'), '0.0')
+        write(reclient_cfg, '0.0')
+        argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
+
+        self.assertEqual(0, ninja_reclient.main(argv))
+
+        self.assertTrue(
+            os.path.isdir(
+                os.path.join(self.root_dir, "out", "a", ".reproxy_tmp")))
+        self.assertTrue(
+            os.path.isdir(
+                os.path.join(
+                    self.root_dir, ".reproxy_cache",
+                    hashlib.md5(
+                        os.path.join(self.root_dir, "out", "a",
+                                     ".reproxy_tmp").encode()).hexdigest())))
+        self.assertTrue(
+            os.path.isdir(
+                os.path.join(self.root_dir, "out", "a", ".reproxy_tmp",
+                             "logs")))
+        self.assertEqual(
+            os.environ.get('RBE_output_dir'),
+            os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs"))
+        self.assertEqual(
+            os.environ.get('RBE_proxy_log_dir'),
+            os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs"))
+        self.assertEqual(
+            os.environ.get('RBE_cache_dir'),
             os.path.join(
                 self.root_dir, ".reproxy_cache",
                 hashlib.md5(
                     os.path.join(self.root_dir, "out", "a",
-                                 ".reproxy_tmp").encode()).hexdigest())))
-    self.assertTrue(
-        os.path.isdir(
-            os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs")))
-    self.assertEqual(
-        os.environ.get('RBE_output_dir'),
-        os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs"))
-    self.assertEqual(
-        os.environ.get('RBE_proxy_log_dir'),
-        os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs"))
-    self.assertEqual(
-        os.environ.get('RBE_cache_dir'),
-        os.path.join(
-            self.root_dir, ".reproxy_cache",
-            hashlib.md5(
-                os.path.join(self.root_dir, "out", "a",
-                             ".reproxy_tmp").encode()).hexdigest()))
-    if sys.platform.startswith('win'):
-      self.assertEqual(
-          os.environ.get('RBE_server_address'),
-          "pipe://%s/reproxy.pipe" % hashlib.md5(
-              os.path.join(self.root_dir, "out", "a",
-                           ".reproxy_tmp").encode()).hexdigest())
-    else:
-      self.assertEqual(
-          os.environ.get('RBE_server_address'),
-          "unix:///tmp/reproxy_%s.sock" % hashlib.sha256(
-              os.path.join(self.root_dir, "out", "a",
-                           ".reproxy_tmp").encode()).hexdigest())
-
-    self.assertEqual(os.environ.get('RBE_metrics_project'), None)
-    self.assertEqual(os.environ.get('RBE_metrics_table'), None)
-    self.assertEqual(os.environ.get('RBE_metrics_labels'), None)
-    self.assertEqual(os.environ.get('RBE_metrics_prefix'), None)
-
-    mock_metrics_status.assert_called_once_with("out/a")
-    mock_ninja.assert_called_once_with(argv)
-    mock_call.assert_has_calls([
-        unittest.mock.call([
-            os.path.join(self.root_dir, reclient_bin_dir,
-                         'bootstrap' + gclient_paths.GetExeSuffix()),
-            "--re_proxy=" +
-            os.path.join(self.root_dir, reclient_bin_dir,
-                         'reproxy' + gclient_paths.GetExeSuffix()),
-            "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
-        ]),
-        unittest.mock.call([
-            os.path.join(self.root_dir, reclient_bin_dir,
-                         'bootstrap' + gclient_paths.GetExeSuffix()),
-            "--shutdown", "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
-        ]),
-    ])
-
-  @unittest.mock.patch.dict(os.environ, {})
-  @unittest.mock.patch('subprocess.call', return_value=0)
-  @unittest.mock.patch('ninja.main', return_value=0)
-  @unittest.mock.patch('reclient_metrics.check_status', return_value=True)
-  def test_ninja_reclient_clears_log_dir(self, mock_metrics_status, mock_ninja,
-                                         mock_call):
-    reclient_bin_dir = os.path.join('src', 'buildtools', 'reclient')
-    reclient_cfg = os.path.join('src', 'buildtools', 'reclient_cfgs',
-                                'reproxy.cfg')
-    write('.gclient', '')
-    write('.gclient_entries', 'entries = {"buildtools": "..."}')
-    write(os.path.join(reclient_bin_dir, 'version.txt'), '0.0')
-    write(reclient_cfg, '0.0')
-    argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
-
-    os.makedirs(os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs"),
-                exist_ok=True)
-    with open(
-        os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs",
-                     "reproxy.rpl"), "w") as f:
-      print("Content", file=f)
-
-    self.assertEqual(0, ninja_reclient.main(argv))
-
-    self.assertTrue(
-        os.path.isdir(os.path.join(self.root_dir, "out", "a", ".reproxy_tmp")))
-    self.assertTrue(
-        os.path.isdir(
-            os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs")))
-    self.assertFalse(
-        os.path.isfile(
-            os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs",
-                         "reproxy.rpl")))
-
-  @unittest.mock.patch('subprocess.call', return_value=0)
-  @unittest.mock.patch('ninja.main', side_effect=KeyboardInterrupt())
-  def test_ninja_reclient_ninja_interrupted(self, mock_ninja, mock_call):
-    reclient_bin_dir = os.path.join('src', 'buildtools', 'reclient')
-    reclient_cfg = os.path.join('src', 'buildtools', 'reclient_cfgs',
-                                'reproxy.cfg')
-    write('.gclient', '')
-    write('.gclient_entries', 'entries = {"buildtools": "..."}')
-    write(os.path.join(reclient_bin_dir, 'version.txt'), '0.0')
-    write(reclient_cfg, '0.0')
-    argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
-
-    self.assertEqual(1, ninja_reclient.main(argv))
-
-    mock_ninja.assert_called_once_with(argv)
-    mock_call.assert_has_calls([
-        unittest.mock.call([
-            os.path.join(self.root_dir, reclient_bin_dir,
-                         'bootstrap' + gclient_paths.GetExeSuffix()),
-            "--re_proxy=" +
-            os.path.join(self.root_dir, reclient_bin_dir,
-                         'reproxy' + gclient_paths.GetExeSuffix()),
-            "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
-        ]),
-        unittest.mock.call([
-            os.path.join(self.root_dir, reclient_bin_dir,
-                         'bootstrap' + gclient_paths.GetExeSuffix()),
-            "--shutdown", "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
-        ]),
-    ])
-
-  @unittest.mock.patch('subprocess.call', return_value=0)
-  @unittest.mock.patch('ninja.main', return_value=0)
-  def test_ninja_reclient_cfg_not_found(self, mock_ninja, mock_call):
-    write('.gclient', '')
-    write('.gclient_entries', 'entries = {"buildtools": "..."}')
-    write(os.path.join('src', 'buildtools', 'reclient', 'version.txt'), '0.0')
-    argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
-
-    self.assertEqual(1, ninja_reclient.main(argv))
-
-    mock_ninja.assert_not_called()
-    mock_call.assert_not_called()
-
-  @unittest.mock.patch('subprocess.call', return_value=0)
-  @unittest.mock.patch('ninja.main', return_value=0)
-  def test_ninja_reclient_bins_not_found(self, mock_ninja, mock_call):
-    write('.gclient', '')
-    write('.gclient_entries', 'entries = {"buildtools": "..."}')
-    write(os.path.join('src', 'buildtools', 'reclient_cfgs', 'reproxy.cfg'),
-          '0.0')
-    argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
-
-    self.assertEqual(1, ninja_reclient.main(argv))
-
-    mock_ninja.assert_not_called()
-    mock_call.assert_not_called()
+                                 ".reproxy_tmp").encode()).hexdigest()))
+        if sys.platform.startswith('win'):
+            self.assertEqual(
+                os.environ.get('RBE_server_address'),
+                "pipe://%s/reproxy.pipe" % hashlib.md5(
+                    os.path.join(self.root_dir, "out", "a",
+                                 ".reproxy_tmp").encode()).hexdigest())
+        else:
+            self.assertEqual(
+                os.environ.get('RBE_server_address'),
+                "unix:///tmp/reproxy_%s.sock" % hashlib.sha256(
+                    os.path.join(self.root_dir, "out", "a",
+                                 ".reproxy_tmp").encode()).hexdigest())
+
+        self.assertEqual(os.environ.get('RBE_metrics_project'), None)
+        self.assertEqual(os.environ.get('RBE_metrics_table'), None)
+        self.assertEqual(os.environ.get('RBE_metrics_labels'), None)
+        self.assertEqual(os.environ.get('RBE_metrics_prefix'), None)
+
+        mock_metrics_status.assert_called_once_with("out/a")
+        mock_ninja.assert_called_once_with(argv)
+        mock_call.assert_has_calls([
+            unittest.mock.call([
+                os.path.join(self.root_dir, reclient_bin_dir,
+                             'bootstrap' + gclient_paths.GetExeSuffix()),
+                "--re_proxy=" +
+                os.path.join(self.root_dir, reclient_bin_dir,
+                             'reproxy' + gclient_paths.GetExeSuffix()),
+                "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
+            ]),
+            unittest.mock.call([
+                os.path.join(self.root_dir, reclient_bin_dir,
+                             'bootstrap' + gclient_paths.GetExeSuffix()),
+                "--shutdown",
+                "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
+            ]),
+        ])
+
+    @unittest.mock.patch.dict(os.environ, {})
+    @unittest.mock.patch('subprocess.call', return_value=0)
+    @unittest.mock.patch('ninja.main', return_value=0)
+    @unittest.mock.patch('reclient_metrics.check_status', return_value=True)
+    def test_ninja_reclient_clears_log_dir(self, mock_metrics_status,
+                                           mock_ninja, mock_call):
+        reclient_bin_dir = os.path.join('src', 'buildtools', 'reclient')
+        reclient_cfg = os.path.join('src', 'buildtools', 'reclient_cfgs',
+                                    'reproxy.cfg')
+        write('.gclient', '')
+        write('.gclient_entries', 'entries = {"buildtools": "..."}')
+        write(os.path.join(reclient_bin_dir, 'version.txt'), '0.0')
+        write(reclient_cfg, '0.0')
+        argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
+
+        os.makedirs(os.path.join(self.root_dir, "out", "a", ".reproxy_tmp",
+                                 "logs"),
+                    exist_ok=True)
+        with open(
+                os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs",
+                             "reproxy.rpl"), "w") as f:
+            print("Content", file=f)
+
+        self.assertEqual(0, ninja_reclient.main(argv))
+
+        self.assertTrue(
+            os.path.isdir(
+                os.path.join(self.root_dir, "out", "a", ".reproxy_tmp")))
+        self.assertTrue(
+            os.path.isdir(
+                os.path.join(self.root_dir, "out", "a", ".reproxy_tmp",
+                             "logs")))
+        self.assertFalse(
+            os.path.isfile(
+                os.path.join(self.root_dir, "out", "a", ".reproxy_tmp", "logs",
+                             "reproxy.rpl")))
+
+    @unittest.mock.patch('subprocess.call', return_value=0)
+    @unittest.mock.patch('ninja.main', side_effect=KeyboardInterrupt())
+    def test_ninja_reclient_ninja_interrupted(self, mock_ninja, mock_call):
+        reclient_bin_dir = os.path.join('src', 'buildtools', 'reclient')
+        reclient_cfg = os.path.join('src', 'buildtools', 'reclient_cfgs',
+                                    'reproxy.cfg')
+        write('.gclient', '')
+        write('.gclient_entries', 'entries = {"buildtools": "..."}')
+        write(os.path.join(reclient_bin_dir, 'version.txt'), '0.0')
+        write(reclient_cfg, '0.0')
+        argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
+
+        self.assertEqual(1, ninja_reclient.main(argv))
+
+        mock_ninja.assert_called_once_with(argv)
+        mock_call.assert_has_calls([
+            unittest.mock.call([
+                os.path.join(self.root_dir, reclient_bin_dir,
+                             'bootstrap' + gclient_paths.GetExeSuffix()),
+                "--re_proxy=" +
+                os.path.join(self.root_dir, reclient_bin_dir,
+                             'reproxy' + gclient_paths.GetExeSuffix()),
+                "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
+            ]),
+            unittest.mock.call([
+                os.path.join(self.root_dir, reclient_bin_dir,
+                             'bootstrap' + gclient_paths.GetExeSuffix()),
+                "--shutdown",
+                "--cfg=" + os.path.join(self.root_dir, reclient_cfg)
+            ]),
+        ])
+
+    @unittest.mock.patch('subprocess.call', return_value=0)
+    @unittest.mock.patch('ninja.main', return_value=0)
+    def test_ninja_reclient_cfg_not_found(self, mock_ninja, mock_call):
+        write('.gclient', '')
+        write('.gclient_entries', 'entries = {"buildtools": "..."}')
+        write(os.path.join('src', 'buildtools', 'reclient', 'version.txt'),
+              '0.0')
+        argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
+
+        self.assertEqual(1, ninja_reclient.main(argv))
+
+        mock_ninja.assert_not_called()
+        mock_call.assert_not_called()
+
+    @unittest.mock.patch('subprocess.call', return_value=0)
+    @unittest.mock.patch('ninja.main', return_value=0)
+    def test_ninja_reclient_bins_not_found(self, mock_ninja, mock_call):
+        write('.gclient', '')
+        write('.gclient_entries', 'entries = {"buildtools": "..."}')
+        write(os.path.join('src', 'buildtools', 'reclient_cfgs', 'reproxy.cfg'),
+              '0.0')
+        argv = ["ninja_reclient.py", "-C", "out/a", "chrome"]
+
+        self.assertEqual(1, ninja_reclient.main(argv))
+
+        mock_ninja.assert_not_called()
+        mock_call.assert_not_called()
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 160 - 144
tests/ninjalog_uploader_test.py

@@ -16,150 +16,166 @@ import ninjalog_uploader
 
 
 class NinjalogUploaderTest(unittest.TestCase):
-  def test_IsGoogler(self):
-    with unittest.mock.patch('subprocess.run') as run_mock:
-      run_mock.return_value.returncode = 0
-      run_mock.return_value.stdout = ('Login as foo@google.com\n'
-                                      'goma is ready to use')
-      self.assertTrue(ninjalog_uploader.IsGoogler())
-
-    with unittest.mock.patch('subprocess.run') as run_mock:
-      run_mock.return_value.returncode = 1
-      self.assertFalse(ninjalog_uploader.IsGoogler())
-
-    with unittest.mock.patch('subprocess.run') as run_mock:
-      run_mock.return_value.returncode = 0
-      run_mock.return_value.stdout = ''
-      self.assertFalse(ninjalog_uploader.IsGoogler())
-
-    with unittest.mock.patch('subprocess.run') as run_mock:
-      run_mock.return_value.returncode = 0
-      run_mock.return_value.stdout = 'Login as foo@example.com\n'
-      self.assertFalse(ninjalog_uploader.IsGoogler())
-
-  def test_parse_gn_args(self):
-    self.assertEqual(ninjalog_uploader.ParseGNArgs(json.dumps([])), {})
-
-    # Extract current configs from GN's output json.
-    self.assertEqual(ninjalog_uploader.ParseGNArgs(json.dumps([
-        {
-            'current': {'value': 'true'},
-            'default': {'value': 'false'},
-            'name': 'is_component_build'
-        },
-        {
-            'default': {'value': '"x64"'},
-            'name': 'host_cpu'
-        },
-    ])), {
-        'is_component_build': 'true',
-        'host_cpu': '"x64"',
-    })
-
-    self.assertEqual(ninjalog_uploader.ParseGNArgs(json.dumps([
-        {
-            'current': {'value': 'true'},
-            'default': {'value': 'false'},
-            'name': 'is_component_build'
-        },
-        {
-            'current': {'value': 'false'},
-            'default': {'value': 'false'},
-            'name': 'use_goma'
-        },
-    ])), {'is_component_build': 'true',
-          'use_goma': 'false'})
-
-  def test_get_ninjalog(self):
-    # No args => default to cwd.
-    self.assertEqual(ninjalog_uploader.GetNinjalog(['ninja']),
-                     './.ninja_log')
-
-    # Specified by -C case.
-    self.assertEqual(
-        ninjalog_uploader.GetNinjalog(['ninja', '-C', 'out/Release']),
-        'out/Release/.ninja_log')
-    self.assertEqual(
-        ninjalog_uploader.GetNinjalog(['ninja', '-Cout/Release']),
-        'out/Release/.ninja_log')
-
-    # Invalid -C flag case.
-    self.assertEqual(ninjalog_uploader.GetNinjalog(['ninja', '-C']),
-                     './.ninja_log')
-
-    # Multiple target directories => use the last directory.
-    self.assertEqual(ninjalog_uploader.GetNinjalog(
-        ['ninja', '-C', 'out/Release', '-C', 'out/Debug']),
-        'out/Debug/.ninja_log')
-
-  def test_get_build_target_from_command_line(self):
-    self.assertEqual(
-        ninjalog_uploader.GetBuildTargetFromCommandLine(
-            ['python3', 'ninja.py', 'chrome']), ['chrome'])
-
-    self.assertEqual(
-        ninjalog_uploader.GetBuildTargetFromCommandLine(['python3',
-                                                         'ninja.py']), [])
-
-    self.assertEqual(
-        ninjalog_uploader.GetBuildTargetFromCommandLine(
-            ['python3', 'ninja.py', '-j', '1000', 'chrome']), ['chrome'])
-
-    self.assertEqual(
-        ninjalog_uploader.GetBuildTargetFromCommandLine(
-            ['python3', 'ninja.py', 'chrome', '-j', '1000']), ['chrome'])
-
-    self.assertEqual(
-        ninjalog_uploader.GetBuildTargetFromCommandLine(
-            ['python3', 'ninja.py', '-C', 'chrome']), [])
-
-    self.assertEqual(
-        ninjalog_uploader.GetBuildTargetFromCommandLine(
-            ['python3', 'ninja.py', '-Cout/Release', 'chrome']), ['chrome'])
-
-    self.assertEqual(
-        ninjalog_uploader.GetBuildTargetFromCommandLine(
-            ['python3', 'ninja.py', '-C', 'out/Release', 'chrome', 'all']),
-        ['chrome', 'all'])
-
-  @unittest.skipIf(sys.platform == 'win32', 'posix path test')
-  def test_get_build_target_from_command_line_filter_posix(self):
-    self.assertEqual(
-        ninjalog_uploader.GetBuildTargetFromCommandLine([
-            'python3', 'ninja.py', '-C', 'out/Release', 'chrome', 'all',
-            '/path/to/foo', '-p'
-        ]), ['chrome', 'all'])
-
-  @unittest.skipUnless(sys.platform == 'win32', 'Windows path test')
-  def test_get_build_target_from_command_line_filter_win(self):
-    self.assertEqual(
-        ninjalog_uploader.GetBuildTargetFromCommandLine([
-            'python3', 'ninja.py', '-C', 'out/Release', 'chrome', 'all',
-            'C:\\path\\to\\foo', '-p'
-        ]), ['chrome', 'all'])
-
-  def test_get_j_flag(self):
-    self.assertEqual(ninjalog_uploader.GetJflag(
-        ['ninja']), None)
-
-    self.assertEqual(ninjalog_uploader.GetJflag(
-        ['ninja','-j', '1000']), 1000)
-
-    self.assertEqual(ninjalog_uploader.GetJflag(
-        ['ninja','-j', '1000a']), None)
-
-    self.assertEqual(ninjalog_uploader.GetJflag(
-        ['ninja','-j', 'a']), None)
-
-    self.assertEqual(ninjalog_uploader.GetJflag(
-        ['ninja','-j1000']), 1000)
-
-    self.assertEqual(ninjalog_uploader.GetJflag(
-        ['ninja','-ja']), None)
-
-    self.assertEqual(ninjalog_uploader.GetJflag(
-        ['ninja','-j']), None)
+    def test_IsGoogler(self):
+        with unittest.mock.patch('subprocess.run') as run_mock:
+            run_mock.return_value.returncode = 0
+            run_mock.return_value.stdout = ('Login as foo@google.com\n'
+                                            'goma is ready to use')
+            self.assertTrue(ninjalog_uploader.IsGoogler())
+
+        with unittest.mock.patch('subprocess.run') as run_mock:
+            run_mock.return_value.returncode = 1
+            self.assertFalse(ninjalog_uploader.IsGoogler())
+
+        with unittest.mock.patch('subprocess.run') as run_mock:
+            run_mock.return_value.returncode = 0
+            run_mock.return_value.stdout = ''
+            self.assertFalse(ninjalog_uploader.IsGoogler())
+
+        with unittest.mock.patch('subprocess.run') as run_mock:
+            run_mock.return_value.returncode = 0
+            run_mock.return_value.stdout = 'Login as foo@example.com\n'
+            self.assertFalse(ninjalog_uploader.IsGoogler())
+
+    def test_parse_gn_args(self):
+        self.assertEqual(ninjalog_uploader.ParseGNArgs(json.dumps([])), {})
+
+        # Extract current configs from GN's output json.
+        self.assertEqual(
+            ninjalog_uploader.ParseGNArgs(
+                json.dumps([
+                    {
+                        'current': {
+                            'value': 'true'
+                        },
+                        'default': {
+                            'value': 'false'
+                        },
+                        'name': 'is_component_build'
+                    },
+                    {
+                        'default': {
+                            'value': '"x64"'
+                        },
+                        'name': 'host_cpu'
+                    },
+                ])), {
+                    'is_component_build': 'true',
+                    'host_cpu': '"x64"',
+                })
+
+        self.assertEqual(
+            ninjalog_uploader.ParseGNArgs(
+                json.dumps([
+                    {
+                        'current': {
+                            'value': 'true'
+                        },
+                        'default': {
+                            'value': 'false'
+                        },
+                        'name': 'is_component_build'
+                    },
+                    {
+                        'current': {
+                            'value': 'false'
+                        },
+                        'default': {
+                            'value': 'false'
+                        },
+                        'name': 'use_goma'
+                    },
+                ])), {
+                    'is_component_build': 'true',
+                    'use_goma': 'false'
+                })
+
+    def test_get_ninjalog(self):
+        # No args => default to cwd.
+        self.assertEqual(ninjalog_uploader.GetNinjalog(['ninja']),
+                         './.ninja_log')
+
+        # Specified by -C case.
+        self.assertEqual(
+            ninjalog_uploader.GetNinjalog(['ninja', '-C', 'out/Release']),
+            'out/Release/.ninja_log')
+        self.assertEqual(
+            ninjalog_uploader.GetNinjalog(['ninja', '-Cout/Release']),
+            'out/Release/.ninja_log')
+
+        # Invalid -C flag case.
+        self.assertEqual(ninjalog_uploader.GetNinjalog(['ninja', '-C']),
+                         './.ninja_log')
+
+        # Multiple target directories => use the last directory.
+        self.assertEqual(
+            ninjalog_uploader.GetNinjalog(
+                ['ninja', '-C', 'out/Release', '-C', 'out/Debug']),
+            'out/Debug/.ninja_log')
+
+    def test_get_build_target_from_command_line(self):
+        self.assertEqual(
+            ninjalog_uploader.GetBuildTargetFromCommandLine(
+                ['python3', 'ninja.py', 'chrome']), ['chrome'])
+
+        self.assertEqual(
+            ninjalog_uploader.GetBuildTargetFromCommandLine(
+                ['python3', 'ninja.py']), [])
+
+        self.assertEqual(
+            ninjalog_uploader.GetBuildTargetFromCommandLine(
+                ['python3', 'ninja.py', '-j', '1000', 'chrome']), ['chrome'])
+
+        self.assertEqual(
+            ninjalog_uploader.GetBuildTargetFromCommandLine(
+                ['python3', 'ninja.py', 'chrome', '-j', '1000']), ['chrome'])
+
+        self.assertEqual(
+            ninjalog_uploader.GetBuildTargetFromCommandLine(
+                ['python3', 'ninja.py', '-C', 'chrome']), [])
+
+        self.assertEqual(
+            ninjalog_uploader.GetBuildTargetFromCommandLine(
+                ['python3', 'ninja.py', '-Cout/Release', 'chrome']), ['chrome'])
+
+        self.assertEqual(
+            ninjalog_uploader.GetBuildTargetFromCommandLine(
+                ['python3', 'ninja.py', '-C', 'out/Release', 'chrome', 'all']),
+            ['chrome', 'all'])
+
+    @unittest.skipIf(sys.platform == 'win32', 'posix path test')
+    def test_get_build_target_from_command_line_filter_posix(self):
+        self.assertEqual(
+            ninjalog_uploader.GetBuildTargetFromCommandLine([
+                'python3', 'ninja.py', '-C', 'out/Release', 'chrome', 'all',
+                '/path/to/foo', '-p'
+            ]), ['chrome', 'all'])
+
+    @unittest.skipUnless(sys.platform == 'win32', 'Windows path test')
+    def test_get_build_target_from_command_line_filter_win(self):
+        self.assertEqual(
+            ninjalog_uploader.GetBuildTargetFromCommandLine([
+                'python3', 'ninja.py', '-C', 'out/Release', 'chrome', 'all',
+                'C:\\path\\to\\foo', '-p'
+            ]), ['chrome', 'all'])
+
+    def test_get_j_flag(self):
+        self.assertEqual(ninjalog_uploader.GetJflag(['ninja']), None)
+
+        self.assertEqual(ninjalog_uploader.GetJflag(['ninja', '-j', '1000']),
+                         1000)
+
+        self.assertEqual(ninjalog_uploader.GetJflag(['ninja', '-j', '1000a']),
+                         None)
+
+        self.assertEqual(ninjalog_uploader.GetJflag(['ninja', '-j', 'a']), None)
+
+        self.assertEqual(ninjalog_uploader.GetJflag(['ninja', '-j1000']), 1000)
+
+        self.assertEqual(ninjalog_uploader.GetJflag(['ninja', '-ja']), None)
+
+        self.assertEqual(ninjalog_uploader.GetJflag(['ninja', '-j']), None)
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 224 - 226
tests/owners_client_test.py

@@ -13,7 +13,6 @@ sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
 import gerrit_util
 import owners_client
 
-
 alice = 'alice@example.com'
 bob = 'bob@example.com'
 chris = 'chris@example.com'
@@ -22,247 +21,246 @@ emily = 'emily@example.com'
 
 
 class GerritClientTest(unittest.TestCase):
-  def setUp(self):
-    self.client = owners_client.GerritClient('host', 'project', 'branch')
-    self.addCleanup(mock.patch.stopall)
-
-  def testListOwners(self):
-    mock.patch(
-        'gerrit_util.GetOwnersForFile',
-        return_value={
-          "code_owners": [
-            {
-              "account": {
-                "email": 'approver@example.com'
-              }
-            },
-            {
-              "account": {
-                "email": 'reviewer@example.com'
-              },
-            },
-            {
-              "account": {
-                "email": 'missing@example.com'
-              },
-            },
-            {
-              "account": {},
-            }
-          ]
-        }).start()
-    self.assertEqual(
-        ['approver@example.com', 'reviewer@example.com', 'missing@example.com'],
-        self.client.ListOwners(os.path.join('bar', 'everyone', 'foo.txt')))
-
-    # Result should be cached.
-    self.assertEqual(
-        ['approver@example.com', 'reviewer@example.com', 'missing@example.com'],
-        self.client.ListOwners(os.path.join('bar', 'everyone', 'foo.txt')))
-    # Always use slashes as separators.
-    gerrit_util.GetOwnersForFile.assert_called_once_with(
-        'host', 'project', 'branch', 'bar/everyone/foo.txt',
-        resolve_all_users=False, highest_score_only=False, seed=mock.ANY)
-
-  def testListOwnersOwnedByAll(self):
-    mock.patch(
-      'gerrit_util.GetOwnersForFile',
-      side_effect=[
-        {
-          "code_owners": [
-            {
-              "account": {
-                "email": 'foo@example.com'
-              },
-            },
-          ],
-          "owned_by_all_users": True,
-        },
-        {
-          "code_owners": [
-            {
-              "account": {
-                "email": 'bar@example.com'
-              },
-            },
-          ],
-          "owned_by_all_users": False,
-        },
-      ]
-    ).start()
-    self.assertEqual(['foo@example.com', self.client.EVERYONE],
-                     self.client.ListOwners('foo.txt'))
-    self.assertEqual(['bar@example.com'], self.client.ListOwners('bar.txt'))
+    def setUp(self):
+        self.client = owners_client.GerritClient('host', 'project', 'branch')
+        self.addCleanup(mock.patch.stopall)
+
+    def testListOwners(self):
+        mock.patch('gerrit_util.GetOwnersForFile',
+                   return_value={
+                       "code_owners": [{
+                           "account": {
+                               "email": 'approver@example.com'
+                           }
+                       }, {
+                           "account": {
+                               "email": 'reviewer@example.com'
+                           },
+                       }, {
+                           "account": {
+                               "email": 'missing@example.com'
+                           },
+                       }, {
+                           "account": {},
+                       }]
+                   }).start()
+        self.assertEqual([
+            'approver@example.com', 'reviewer@example.com',
+            'missing@example.com'
+        ], self.client.ListOwners(os.path.join('bar', 'everyone', 'foo.txt')))
+
+        # Result should be cached.
+        self.assertEqual([
+            'approver@example.com', 'reviewer@example.com',
+            'missing@example.com'
+        ], self.client.ListOwners(os.path.join('bar', 'everyone', 'foo.txt')))
+        # Always use slashes as separators.
+        gerrit_util.GetOwnersForFile.assert_called_once_with(
+            'host',
+            'project',
+            'branch',
+            'bar/everyone/foo.txt',
+            resolve_all_users=False,
+            highest_score_only=False,
+            seed=mock.ANY)
+
+    def testListOwnersOwnedByAll(self):
+        mock.patch('gerrit_util.GetOwnersForFile',
+                   side_effect=[
+                       {
+                           "code_owners": [
+                               {
+                                   "account": {
+                                       "email": 'foo@example.com'
+                                   },
+                               },
+                           ],
+                           "owned_by_all_users":
+                           True,
+                       },
+                       {
+                           "code_owners": [
+                               {
+                                   "account": {
+                                       "email": 'bar@example.com'
+                                   },
+                               },
+                           ],
+                           "owned_by_all_users":
+                           False,
+                       },
+                   ]).start()
+        self.assertEqual(['foo@example.com', self.client.EVERYONE],
+                         self.client.ListOwners('foo.txt'))
+        self.assertEqual(['bar@example.com'], self.client.ListOwners('bar.txt'))
 
 
 class TestClient(owners_client.OwnersClient):
-  def __init__(self, owners_by_path):
-    super(TestClient, self).__init__()
-    self.owners_by_path = owners_by_path
+    def __init__(self, owners_by_path):
+        super(TestClient, self).__init__()
+        self.owners_by_path = owners_by_path
 
-  def ListOwners(self, path):
-    return self.owners_by_path[path]
+    def ListOwners(self, path):
+        return self.owners_by_path[path]
 
 
 class OwnersClientTest(unittest.TestCase):
-  def setUp(self):
-    self.owners = {}
-    self.client = TestClient(self.owners)
-
-  def testGetFilesApprovalStatus(self):
-    self.client.owners_by_path = {
-      'approved': ['approver@example.com'],
-      'pending': ['reviewer@example.com'],
-      'insufficient': ['insufficient@example.com'],
-      'everyone': [owners_client.OwnersClient.EVERYONE],
-    }
-    self.assertEqual(
-        self.client.GetFilesApprovalStatus(
-            ['approved', 'pending', 'insufficient'],
-            ['approver@example.com'], ['reviewer@example.com']),
-        {
-          'approved': owners_client.OwnersClient.APPROVED,
-          'pending': owners_client.OwnersClient.PENDING,
-          'insufficient': owners_client.OwnersClient.INSUFFICIENT_REVIEWERS,
+    def setUp(self):
+        self.owners = {}
+        self.client = TestClient(self.owners)
+
+    def testGetFilesApprovalStatus(self):
+        self.client.owners_by_path = {
+            'approved': ['approver@example.com'],
+            'pending': ['reviewer@example.com'],
+            'insufficient': ['insufficient@example.com'],
+            'everyone': [owners_client.OwnersClient.EVERYONE],
+        }
+        self.assertEqual(
+            self.client.GetFilesApprovalStatus(
+                ['approved', 'pending', 'insufficient'],
+                ['approver@example.com'], ['reviewer@example.com']), {
+                    'approved': owners_client.OwnersClient.APPROVED,
+                    'pending': owners_client.OwnersClient.PENDING,
+                    'insufficient':
+                    owners_client.OwnersClient.INSUFFICIENT_REVIEWERS,
+                })
+        self.assertEqual(
+            self.client.GetFilesApprovalStatus(['everyone'],
+                                               ['anyone@example.com'], []),
+            {'everyone': owners_client.OwnersClient.APPROVED})
+        self.assertEqual(
+            self.client.GetFilesApprovalStatus(['everyone'], [],
+                                               ['anyone@example.com']),
+            {'everyone': owners_client.OwnersClient.PENDING})
+        self.assertEqual(
+            self.client.GetFilesApprovalStatus(['everyone'], [], []),
+            {'everyone': owners_client.OwnersClient.INSUFFICIENT_REVIEWERS})
+
+    def testScoreOwners(self):
+        self.client.owners_by_path = {'a': [alice, bob, chris]}
+        self.assertEqual(
+            self.client.ScoreOwners(self.client.owners_by_path.keys()),
+            [alice, bob, chris])
+
+        self.client.owners_by_path = {
+            'a': [alice, bob],
+            'b': [bob],
+            'c': [bob, chris]
+        }
+        self.assertEqual(
+            self.client.ScoreOwners(self.client.owners_by_path.keys()),
+            [alice, bob, chris])
+
+        self.client.owners_by_path = {
+            'a': [alice, bob],
+            'b': [bob],
+            'c': [bob, chris]
+        }
+        self.assertEqual(
+            self.client.ScoreOwners(self.client.owners_by_path.keys(),
+                                    exclude=[chris]),
+            [alice, bob],
+        )
+
+        self.client.owners_by_path = {
+            'a': [alice, bob, chris, dave],
+            'b': [chris, bob, dave],
+            'c': [chris, dave],
+            'd': [alice, chris, dave]
+        }
+        self.assertEqual(
+            self.client.ScoreOwners(self.client.owners_by_path.keys()),
+            [alice, chris, bob, dave])
+
+    def assertSuggestsOwners(self, owners_by_path, exclude=None):
+        self.client.owners_by_path = owners_by_path
+        suggested = self.client.SuggestOwners(owners_by_path.keys(),
+                                              exclude=exclude)
+
+        # Owners should appear only once
+        self.assertEqual(len(suggested), len(set(suggested)))
+
+        # All paths should be covered.
+        suggested = set(suggested)
+        for owners in owners_by_path.values():
+            self.assertTrue(suggested & set(owners))
+
+        # No excluded owners should be present.
+        if exclude:
+            for owner in suggested:
+                self.assertNotIn(owner, exclude)
+
+    def testSuggestOwners(self):
+        self.assertSuggestsOwners({})
+        self.assertSuggestsOwners({'a': [alice]})
+        self.assertSuggestsOwners({'abcd': [alice, bob, chris, dave]})
+        self.assertSuggestsOwners({'abcd': [alice, bob, chris, dave]},
+                                  exclude=[alice, bob])
+        self.assertSuggestsOwners({
+            'ae': [alice, emily],
+            'be': [bob, emily],
+            'ce': [chris, emily],
+            'de': [dave, emily]
+        })
+        self.assertSuggestsOwners({
+            'ad': [alice, dave],
+            'cad': [chris, alice, dave],
+            'ead': [emily, alice, dave],
+            'bd': [bob, dave]
+        })
+        self.assertSuggestsOwners({
+            'a': [alice],
+            'b': [bob],
+            'c': [chris],
+            'ad': [alice, dave]
+        })
+        self.assertSuggestsOwners({
+            'abc': [alice, bob, chris],
+            'acb': [alice, chris, bob],
+            'bac': [bob, alice, chris],
+            'bca': [bob, chris, alice],
+            'cab': [chris, alice, bob],
+            'cba': [chris, bob, alice]
         })
-    self.assertEqual(
-        self.client.GetFilesApprovalStatus(
-            ['everyone'], ['anyone@example.com'], []),
-        {'everyone': owners_client.OwnersClient.APPROVED})
-    self.assertEqual(
-        self.client.GetFilesApprovalStatus(
-            ['everyone'], [], ['anyone@example.com']),
-        {'everyone': owners_client.OwnersClient.PENDING})
-    self.assertEqual(
-        self.client.GetFilesApprovalStatus(['everyone'], [], []),
-        {'everyone': owners_client.OwnersClient.INSUFFICIENT_REVIEWERS})
-
-  def testScoreOwners(self):
-    self.client.owners_by_path = {
-        'a': [alice, bob, chris]
-    }
-    self.assertEqual(
-      self.client.ScoreOwners(self.client.owners_by_path.keys()),
-      [alice, bob, chris]
-    )
-
-    self.client.owners_by_path = {
-        'a': [alice, bob],
-        'b': [bob],
-        'c': [bob, chris]
-    }
-    self.assertEqual(
-      self.client.ScoreOwners(self.client.owners_by_path.keys()),
-      [alice, bob, chris]
-    )
-
-    self.client.owners_by_path = {
-        'a': [alice, bob],
-        'b': [bob],
-        'c': [bob, chris]
-    }
-    self.assertEqual(
-      self.client.ScoreOwners(
-          self.client.owners_by_path.keys(), exclude=[chris]),
-      [alice, bob],
-    )
-
-    self.client.owners_by_path = {
-        'a': [alice, bob, chris, dave],
-        'b': [chris, bob, dave],
-        'c': [chris, dave],
-        'd': [alice, chris, dave]
-    }
-    self.assertEqual(
-      self.client.ScoreOwners(self.client.owners_by_path.keys()),
-      [alice, chris, bob, dave]
-    )
-
-  def assertSuggestsOwners(self, owners_by_path, exclude=None):
-    self.client.owners_by_path = owners_by_path
-    suggested = self.client.SuggestOwners(
-        owners_by_path.keys(), exclude=exclude)
-
-    # Owners should appear only once
-    self.assertEqual(len(suggested), len(set(suggested)))
-
-    # All paths should be covered.
-    suggested = set(suggested)
-    for owners in owners_by_path.values():
-      self.assertTrue(suggested & set(owners))
-
-    # No excluded owners should be present.
-    if exclude:
-      for owner in suggested:
-        self.assertNotIn(owner, exclude)
-
-  def testSuggestOwners(self):
-    self.assertSuggestsOwners({})
-    self.assertSuggestsOwners({'a': [alice]})
-    self.assertSuggestsOwners({'abcd': [alice, bob, chris, dave]})
-    self.assertSuggestsOwners(
-        {'abcd': [alice, bob, chris, dave]},
-        exclude=[alice, bob])
-    self.assertSuggestsOwners(
-        {'ae': [alice, emily],
-         'be': [bob, emily],
-         'ce': [chris, emily],
-         'de': [dave, emily]})
-    self.assertSuggestsOwners(
-        {'ad': [alice, dave],
-         'cad': [chris, alice, dave],
-         'ead': [emily, alice, dave],
-         'bd': [bob, dave]})
-    self.assertSuggestsOwners(
-        {'a': [alice],
-         'b': [bob],
-         'c': [chris],
-         'ad': [alice, dave]})
-    self.assertSuggestsOwners(
-        {'abc': [alice, bob, chris],
-         'acb': [alice, chris, bob],
-         'bac': [bob, alice, chris],
-         'bca': [bob, chris, alice],
-         'cab': [chris, alice, bob],
-         'cba': [chris, bob, alice]})
-
-    # Check that we can handle a large amount of files with unrelated owners.
-    self.assertSuggestsOwners(
-        {str(x): [str(x)] for x in range(100)})
-
-  def testBatchListOwners(self):
-    self.client.owners_by_path = {
-        'bar/everyone/foo.txt': [alice, bob],
-        'bar/everyone/bar.txt': [bob],
-        'bar/foo/': [bob, chris]
-    }
-
-    self.assertEqual(
-        {
+
+        # Check that we can handle a large amount of files with unrelated
+        # owners.
+        self.assertSuggestsOwners({str(x): [str(x)] for x in range(100)})
+
+    def testBatchListOwners(self):
+        self.client.owners_by_path = {
             'bar/everyone/foo.txt': [alice, bob],
             'bar/everyone/bar.txt': [bob],
             'bar/foo/': [bob, chris]
-        },
-        self.client.BatchListOwners(
-            ['bar/everyone/foo.txt', 'bar/everyone/bar.txt', 'bar/foo/']))
+        }
+
+        self.assertEqual(
+            {
+                'bar/everyone/foo.txt': [alice, bob],
+                'bar/everyone/bar.txt': [bob],
+                'bar/foo/': [bob, chris]
+            },
+            self.client.BatchListOwners(
+                ['bar/everyone/foo.txt', 'bar/everyone/bar.txt', 'bar/foo/']))
 
 
 class GetCodeOwnersClientTest(unittest.TestCase):
-  def setUp(self):
-    mock.patch('gerrit_util.IsCodeOwnersEnabledOnHost').start()
-    self.addCleanup(mock.patch.stopall)
+    def setUp(self):
+        mock.patch('gerrit_util.IsCodeOwnersEnabledOnHost').start()
+        self.addCleanup(mock.patch.stopall)
 
-  def testGetCodeOwnersClient_CodeOwnersEnabled(self):
-    gerrit_util.IsCodeOwnersEnabledOnHost.return_value = True
-    self.assertIsInstance(
-        owners_client.GetCodeOwnersClient('host', 'project', 'branch'),
-        owners_client.GerritClient)
+    def testGetCodeOwnersClient_CodeOwnersEnabled(self):
+        gerrit_util.IsCodeOwnersEnabledOnHost.return_value = True
+        self.assertIsInstance(
+            owners_client.GetCodeOwnersClient('host', 'project', 'branch'),
+            owners_client.GerritClient)
 
-  def testGetCodeOwnersClient_CodeOwnersDisabled(self):
-    gerrit_util.IsCodeOwnersEnabledOnHost.return_value = False
-    with self.assertRaises(Exception):
-      owners_client.GetCodeOwnersClient('', '', '')
+    def testGetCodeOwnersClient_CodeOwnersDisabled(self):
+        gerrit_util.IsCodeOwnersEnabledOnHost.return_value = False
+        with self.assertRaises(Exception):
+            owners_client.GetCodeOwnersClient('', '', '')
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 223 - 207
tests/owners_finder_test.py

@@ -2,7 +2,6 @@
 # Copyright 2013 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for owners_finder.py."""
 
 import os
@@ -17,7 +16,6 @@ from testing_support import filesystem_mock
 import owners_finder
 import owners_client
 
-
 ben = 'ben@example.com'
 brett = 'brett@example.com'
 darin = 'darin@example.com'
@@ -29,229 +27,247 @@ tom = 'tom@example.com'
 nonowner = 'nonowner@example.com'
 
 
-
-
 def owners_file(*email_addresses, **kwargs):
-  s = ''
-  if kwargs.get('comment'):
-    s += '# %s\n' % kwargs.get('comment')
-  if kwargs.get('noparent'):
-    s += 'set noparent\n'
-  return s + '\n'.join(email_addresses) + '\n'
+    s = ''
+    if kwargs.get('comment'):
+        s += '# %s\n' % kwargs.get('comment')
+    if kwargs.get('noparent'):
+        s += 'set noparent\n'
+    return s + '\n'.join(email_addresses) + '\n'
 
 
 class TestClient(owners_client.OwnersClient):
-  def __init__(self):
-    super(TestClient, self).__init__()
-    self.owners_by_path = {
-      'DEPS': [ken, peter, tom],
-      'base/vlog.h': [ken, peter, tom],
-      'chrome/browser/defaults.h': [brett, ben, ken, peter, tom],
-      'chrome/gpu/gpu_channel.h': [ken, ben, brett, ken, peter, tom],
-      'chrome/renderer/gpu/gpu_channel_host.h': [peter, ben, brett, ken, tom],
-      'chrome/renderer/safe_browsing/scorer.h': [peter, ben, brett, ken, tom],
-      'content/content.gyp': [john, darin],
-      'content/bar/foo.cc': [john, darin],
-      'content/baz/froboz.h': [brett, john, darin],
-      'content/baz/ugly.cc': [brett, john, darin],
-      'content/baz/ugly.h': [brett, john, darin],
-      'content/common/common.cc': [jochen, john, darin],
-      'content/foo/foo.cc': [jochen, john, darin],
-      'content/views/pie.h': [ben, john, self.EVERYONE],
-    }
-
-  def ListOwners(self, path):
-    path = path.replace(os.sep, '/')
-    return self.owners_by_path[path]
+    def __init__(self):
+        super(TestClient, self).__init__()
+        self.owners_by_path = {
+            'DEPS': [ken, peter, tom],
+            'base/vlog.h': [ken, peter, tom],
+            'chrome/browser/defaults.h': [brett, ben, ken, peter, tom],
+            'chrome/gpu/gpu_channel.h': [ken, ben, brett, ken, peter, tom],
+            'chrome/renderer/gpu/gpu_channel_host.h':
+            [peter, ben, brett, ken, tom],
+            'chrome/renderer/safe_browsing/scorer.h':
+            [peter, ben, brett, ken, tom],
+            'content/content.gyp': [john, darin],
+            'content/bar/foo.cc': [john, darin],
+            'content/baz/froboz.h': [brett, john, darin],
+            'content/baz/ugly.cc': [brett, john, darin],
+            'content/baz/ugly.h': [brett, john, darin],
+            'content/common/common.cc': [jochen, john, darin],
+            'content/foo/foo.cc': [jochen, john, darin],
+            'content/views/pie.h': [ben, john, self.EVERYONE],
+        }
+
+    def ListOwners(self, path):
+        path = path.replace(os.sep, '/')
+        return self.owners_by_path[path]
 
 
 class OutputInterceptedOwnersFinder(owners_finder.OwnersFinder):
-  def __init__(
-      self, files, author, reviewers, client, disable_color=False):
-    super(OutputInterceptedOwnersFinder, self).__init__(
-      files, author, reviewers, client, disable_color=disable_color)
-    self.output = []
-    self.indentation_stack = []
+    def __init__(self, files, author, reviewers, client, disable_color=False):
+        super(OutputInterceptedOwnersFinder,
+              self).__init__(files,
+                             author,
+                             reviewers,
+                             client,
+                             disable_color=disable_color)
+        self.output = []
+        self.indentation_stack = []
 
-  def resetText(self):
-    self.output = []
-    self.indentation_stack = []
+    def resetText(self):
+        self.output = []
+        self.indentation_stack = []
 
-  def indent(self):
-    self.indentation_stack.append(self.output)
-    self.output = []
+    def indent(self):
+        self.indentation_stack.append(self.output)
+        self.output = []
 
-  def unindent(self):
-    block = self.output
-    self.output = self.indentation_stack.pop()
-    self.output.append(block)
+    def unindent(self):
+        block = self.output
+        self.output = self.indentation_stack.pop()
+        self.output.append(block)
 
-  def writeln(self, text=''):
-    self.output.append(text)
+    def writeln(self, text=''):
+        self.output.append(text)
 
 
 class _BaseTestCase(unittest.TestCase):
-  default_files = [
-    'base/vlog.h',
-    'chrome/browser/defaults.h',
-    'chrome/gpu/gpu_channel.h',
-    'chrome/renderer/gpu/gpu_channel_host.h',
-    'chrome/renderer/safe_browsing/scorer.h',
-    'content/content.gyp',
-    'content/bar/foo.cc',
-    'content/baz/ugly.cc',
-    'content/baz/ugly.h',
-    'content/views/pie.h'
-  ]
-
-  def ownersFinder(self, files, author=nonowner, reviewers=None):
-    reviewers = reviewers or []
-    return OutputInterceptedOwnersFinder(
-        files, author, reviewers, TestClient(), disable_color=True)
-
-  def defaultFinder(self):
-    return self.ownersFinder(self.default_files)
+    default_files = [
+        'base/vlog.h', 'chrome/browser/defaults.h', 'chrome/gpu/gpu_channel.h',
+        'chrome/renderer/gpu/gpu_channel_host.h',
+        'chrome/renderer/safe_browsing/scorer.h', 'content/content.gyp',
+        'content/bar/foo.cc', 'content/baz/ugly.cc', 'content/baz/ugly.h',
+        'content/views/pie.h'
+    ]
 
+    def ownersFinder(self, files, author=nonowner, reviewers=None):
+        reviewers = reviewers or []
+        return OutputInterceptedOwnersFinder(files,
+                                             author,
+                                             reviewers,
+                                             TestClient(),
+                                             disable_color=True)
 
-class OwnersFinderTests(_BaseTestCase):
-  def test_constructor(self):
-    self.assertNotEqual(self.defaultFinder(), None)
+    def defaultFinder(self):
+        return self.ownersFinder(self.default_files)
 
-  def test_skip_files_owned_by_reviewers(self):
-    files = [
-        'chrome/browser/defaults.h',  # owned by brett
-        'content/bar/foo.cc',         # not owned by brett
-    ]
-    finder = self.ownersFinder(files, reviewers=[brett])
-    self.assertEqual(finder.unreviewed_files, {'content/bar/foo.cc'})
 
-  def test_skip_files_owned_by_author(self):
-    files = [
-        'chrome/browser/defaults.h',  # owned by brett
-        'content/bar/foo.cc',         # not owned by brett
-    ]
-    finder = self.ownersFinder(files, author=brett)
-    self.assertEqual(finder.unreviewed_files, {'content/bar/foo.cc'})
-
-  def test_native_path_sep(self):
-    # Create a path with backslashes on Windows to make sure these are handled.
-    # This test is a harmless duplicate on other platforms.
-    native_slashes_path = 'chrome/browser/defaults.h'.replace('/', os.sep)
-    files = [
-        native_slashes_path,          # owned by brett
-        'content/bar/foo.cc',         # not owned by brett
-    ]
-    finder = self.ownersFinder(files, reviewers=[brett])
-    self.assertEqual(finder.unreviewed_files, {'content/bar/foo.cc'})
-
-  @mock.patch('owners_client.OwnersClient.ScoreOwners')
-  def test_reset(self, mockScoreOwners):
-    mockScoreOwners.return_value = [brett, darin, john, peter, ken, ben, tom]
-    finder = self.defaultFinder()
-    for _ in range(2):
-      expected = [brett, darin, john, peter, ken, ben, tom]
-      self.assertEqual(finder.owners_queue, expected)
-      self.assertEqual(finder.unreviewed_files, {
-          'base/vlog.h',
-          'chrome/browser/defaults.h',
-          'chrome/gpu/gpu_channel.h',
-          'chrome/renderer/gpu/gpu_channel_host.h',
-          'chrome/renderer/safe_browsing/scorer.h',
-          'content/content.gyp',
-          'content/bar/foo.cc',
-          'content/baz/ugly.cc',
-          'content/baz/ugly.h'
-      })
-      self.assertEqual(finder.selected_owners, set())
-      self.assertEqual(finder.deselected_owners, set())
-      self.assertEqual(finder.reviewed_by, {})
-      self.assertEqual(finder.output, [])
-
-      finder.select_owner(john)
-      finder.reset()
-      finder.resetText()
-
-  @mock.patch('owners_client.OwnersClient.ScoreOwners')
-  def test_select(self, mockScoreOwners):
-    mockScoreOwners.return_value = [brett, darin, john, peter, ken, ben, tom]
-    finder = self.defaultFinder()
-    finder.select_owner(john)
-    self.assertEqual(finder.owners_queue, [brett, peter, ken, ben, tom])
-    self.assertEqual(finder.selected_owners, {john})
-    self.assertEqual(finder.deselected_owners, {darin})
-    self.assertEqual(finder.reviewed_by, {'content/bar/foo.cc': john,
-                                          'content/baz/ugly.cc': john,
-                                          'content/baz/ugly.h': john,
-                                          'content/content.gyp': john})
-    self.assertEqual(finder.output,
-                     ['Selected: ' + john, 'Deselected: ' + darin])
-
-    finder = self.defaultFinder()
-    finder.select_owner(darin)
-    self.assertEqual(finder.owners_queue, [brett, peter, ken, ben, tom])
-    self.assertEqual(finder.selected_owners, {darin})
-    self.assertEqual(finder.deselected_owners, {john})
-    self.assertEqual(finder.reviewed_by, {'content/bar/foo.cc': darin,
-                                          'content/baz/ugly.cc': darin,
-                                          'content/baz/ugly.h': darin,
-                                          'content/content.gyp': darin})
-    self.assertEqual(finder.output,
-                     ['Selected: ' + darin, 'Deselected: ' + john])
-
-    finder = self.defaultFinder()
-    finder.select_owner(brett)
-    expected = [darin, john, peter, ken, tom]
-    self.assertEqual(finder.owners_queue, expected)
-    self.assertEqual(finder.selected_owners, {brett})
-    self.assertEqual(finder.deselected_owners, {ben})
-    self.assertEqual(finder.reviewed_by,
-                     {'chrome/browser/defaults.h': brett,
-                      'chrome/gpu/gpu_channel.h': brett,
-                      'chrome/renderer/gpu/gpu_channel_host.h': brett,
-                      'chrome/renderer/safe_browsing/scorer.h': brett,
-                      'content/baz/ugly.cc': brett,
-                      'content/baz/ugly.h': brett})
-    self.assertEqual(finder.output,
-                     ['Selected: ' + brett, 'Deselected: ' + ben])
-
-  @mock.patch('owners_client.OwnersClient.ScoreOwners')
-  def test_deselect(self, mockScoreOwners):
-    mockScoreOwners.return_value = [brett, darin, john, peter, ken, ben, tom]
-    finder = self.defaultFinder()
-    finder.deselect_owner(john)
-    self.assertEqual(finder.owners_queue, [brett, peter, ken, ben, tom])
-    self.assertEqual(finder.selected_owners, {darin})
-    self.assertEqual(finder.deselected_owners, {john})
-    self.assertEqual(finder.reviewed_by, {'content/bar/foo.cc': darin,
-                                          'content/baz/ugly.cc': darin,
-                                          'content/baz/ugly.h': darin,
-                                          'content/content.gyp': darin})
-    self.assertEqual(finder.output,
-                     ['Deselected: ' + john, 'Selected: ' + darin])
-
-  def test_print_file_info(self):
-    finder = self.defaultFinder()
-    finder.print_file_info('chrome/browser/defaults.h')
-    self.assertEqual(finder.output, ['chrome/browser/defaults.h [5]'])
-    finder.resetText()
-
-    finder.print_file_info('chrome/renderer/gpu/gpu_channel_host.h')
-    self.assertEqual(finder.output,
-                     ['chrome/renderer/gpu/gpu_channel_host.h [5]'])
-
-  def test_print_file_info_detailed(self):
-    finder = self.defaultFinder()
-    finder.print_file_info_detailed('chrome/browser/defaults.h')
-    self.assertEqual(finder.output,
-                     ['chrome/browser/defaults.h',
-                       [ben, brett, ken, peter, tom]])
-    finder.resetText()
-
-    finder.print_file_info_detailed('chrome/renderer/gpu/gpu_channel_host.h')
-    self.assertEqual(finder.output,
-                     ['chrome/renderer/gpu/gpu_channel_host.h',
-                       [ben, brett, ken, peter, tom]])
+class OwnersFinderTests(_BaseTestCase):
+    def test_constructor(self):
+        self.assertNotEqual(self.defaultFinder(), None)
+
+    def test_skip_files_owned_by_reviewers(self):
+        files = [
+            'chrome/browser/defaults.h',  # owned by brett
+            'content/bar/foo.cc',  # not owned by brett
+        ]
+        finder = self.ownersFinder(files, reviewers=[brett])
+        self.assertEqual(finder.unreviewed_files, {'content/bar/foo.cc'})
+
+    def test_skip_files_owned_by_author(self):
+        files = [
+            'chrome/browser/defaults.h',  # owned by brett
+            'content/bar/foo.cc',  # not owned by brett
+        ]
+        finder = self.ownersFinder(files, author=brett)
+        self.assertEqual(finder.unreviewed_files, {'content/bar/foo.cc'})
+
+    def test_native_path_sep(self):
+        # Create a path with backslashes on Windows to make sure these are
+        # handled. This test is a harmless duplicate on other platforms.
+        native_slashes_path = 'chrome/browser/defaults.h'.replace('/', os.sep)
+        files = [
+            native_slashes_path,  # owned by brett
+            'content/bar/foo.cc',  # not owned by brett
+        ]
+        finder = self.ownersFinder(files, reviewers=[brett])
+        self.assertEqual(finder.unreviewed_files, {'content/bar/foo.cc'})
+
+    @mock.patch('owners_client.OwnersClient.ScoreOwners')
+    def test_reset(self, mockScoreOwners):
+        mockScoreOwners.return_value = [
+            brett, darin, john, peter, ken, ben, tom
+        ]
+        finder = self.defaultFinder()
+        for _ in range(2):
+            expected = [brett, darin, john, peter, ken, ben, tom]
+            self.assertEqual(finder.owners_queue, expected)
+            self.assertEqual(
+                finder.unreviewed_files, {
+                    'base/vlog.h', 'chrome/browser/defaults.h',
+                    'chrome/gpu/gpu_channel.h',
+                    'chrome/renderer/gpu/gpu_channel_host.h',
+                    'chrome/renderer/safe_browsing/scorer.h',
+                    'content/content.gyp', 'content/bar/foo.cc',
+                    'content/baz/ugly.cc', 'content/baz/ugly.h'
+                })
+            self.assertEqual(finder.selected_owners, set())
+            self.assertEqual(finder.deselected_owners, set())
+            self.assertEqual(finder.reviewed_by, {})
+            self.assertEqual(finder.output, [])
+
+            finder.select_owner(john)
+            finder.reset()
+            finder.resetText()
+
+    @mock.patch('owners_client.OwnersClient.ScoreOwners')
+    def test_select(self, mockScoreOwners):
+        mockScoreOwners.return_value = [
+            brett, darin, john, peter, ken, ben, tom
+        ]
+        finder = self.defaultFinder()
+        finder.select_owner(john)
+        self.assertEqual(finder.owners_queue, [brett, peter, ken, ben, tom])
+        self.assertEqual(finder.selected_owners, {john})
+        self.assertEqual(finder.deselected_owners, {darin})
+        self.assertEqual(
+            finder.reviewed_by, {
+                'content/bar/foo.cc': john,
+                'content/baz/ugly.cc': john,
+                'content/baz/ugly.h': john,
+                'content/content.gyp': john
+            })
+        self.assertEqual(finder.output,
+                         ['Selected: ' + john, 'Deselected: ' + darin])
+
+        finder = self.defaultFinder()
+        finder.select_owner(darin)
+        self.assertEqual(finder.owners_queue, [brett, peter, ken, ben, tom])
+        self.assertEqual(finder.selected_owners, {darin})
+        self.assertEqual(finder.deselected_owners, {john})
+        self.assertEqual(
+            finder.reviewed_by, {
+                'content/bar/foo.cc': darin,
+                'content/baz/ugly.cc': darin,
+                'content/baz/ugly.h': darin,
+                'content/content.gyp': darin
+            })
+        self.assertEqual(finder.output,
+                         ['Selected: ' + darin, 'Deselected: ' + john])
+
+        finder = self.defaultFinder()
+        finder.select_owner(brett)
+        expected = [darin, john, peter, ken, tom]
+        self.assertEqual(finder.owners_queue, expected)
+        self.assertEqual(finder.selected_owners, {brett})
+        self.assertEqual(finder.deselected_owners, {ben})
+        self.assertEqual(
+            finder.reviewed_by, {
+                'chrome/browser/defaults.h': brett,
+                'chrome/gpu/gpu_channel.h': brett,
+                'chrome/renderer/gpu/gpu_channel_host.h': brett,
+                'chrome/renderer/safe_browsing/scorer.h': brett,
+                'content/baz/ugly.cc': brett,
+                'content/baz/ugly.h': brett
+            })
+        self.assertEqual(finder.output,
+                         ['Selected: ' + brett, 'Deselected: ' + ben])
+
+    @mock.patch('owners_client.OwnersClient.ScoreOwners')
+    def test_deselect(self, mockScoreOwners):
+        mockScoreOwners.return_value = [
+            brett, darin, john, peter, ken, ben, tom
+        ]
+        finder = self.defaultFinder()
+        finder.deselect_owner(john)
+        self.assertEqual(finder.owners_queue, [brett, peter, ken, ben, tom])
+        self.assertEqual(finder.selected_owners, {darin})
+        self.assertEqual(finder.deselected_owners, {john})
+        self.assertEqual(
+            finder.reviewed_by, {
+                'content/bar/foo.cc': darin,
+                'content/baz/ugly.cc': darin,
+                'content/baz/ugly.h': darin,
+                'content/content.gyp': darin
+            })
+        self.assertEqual(finder.output,
+                         ['Deselected: ' + john, 'Selected: ' + darin])
+
+    def test_print_file_info(self):
+        finder = self.defaultFinder()
+        finder.print_file_info('chrome/browser/defaults.h')
+        self.assertEqual(finder.output, ['chrome/browser/defaults.h [5]'])
+        finder.resetText()
+
+        finder.print_file_info('chrome/renderer/gpu/gpu_channel_host.h')
+        self.assertEqual(finder.output,
+                         ['chrome/renderer/gpu/gpu_channel_host.h [5]'])
+
+    def test_print_file_info_detailed(self):
+        finder = self.defaultFinder()
+        finder.print_file_info_detailed('chrome/browser/defaults.h')
+        self.assertEqual(
+            finder.output,
+            ['chrome/browser/defaults.h', [ben, brett, ken, peter, tom]])
+        finder.resetText()
+
+        finder.print_file_info_detailed(
+            'chrome/renderer/gpu/gpu_channel_host.h')
+        self.assertEqual(finder.output, [
+            'chrome/renderer/gpu/gpu_channel_host.h',
+            [ben, brett, ken, peter, tom]
+        ])
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 404 - 389
tests/presubmit_canned_checks_test.py

@@ -18,412 +18,427 @@ from testing_support.presubmit_canned_checks_test_mocks import MockChange
 
 import presubmit_canned_checks
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class InclusiveLanguageCheckTest(unittest.TestCase):
-  def testBlockedTerms(self):
-    input_api = MockInputApi()
-    input_api.change.RepositoryRoot = lambda: ''
-    input_api.presubmit_local_path = ''
-
-    input_api.files = [
-        MockFile(
-            os.path.normpath(
-                'infra/inclusive_language_presubmit_exempt_dirs.txt'), [
-                    'some/dir 2 1',
-                    'some/other/dir 2 1',
+    def testBlockedTerms(self):
+        input_api = MockInputApi()
+        input_api.change.RepositoryRoot = lambda: ''
+        input_api.presubmit_local_path = ''
+
+        input_api.files = [
+            MockFile(
+                os.path.normpath(
+                    'infra/inclusive_language_presubmit_exempt_dirs.txt'), [
+                        'some/dir 2 1',
+                        'some/other/dir 2 1',
+                    ]),
+            MockFile(
+                os.path.normpath('some/ios/file.mm'),
+                [
+                    'TEST(SomeClassTest, SomeInteraction, blacklist) {',  # nocheck
+                    '}'
+                ]),
+            MockFile(
+                os.path.normpath('some/mac/file.mm'),
+                [
+                    'TEST(SomeClassTest, SomeInteraction, BlackList) {',  # nocheck
+                    '}'
+                ]),
+            MockFile(os.path.normpath('another/ios_file.mm'),
+                     ['class SomeTest : public testing::Test blocklist {};']),
+            MockFile(os.path.normpath('some/ios/file_egtest.mm'),
+                     ['- (void)testSomething { V(whitelist); }']),  # nocheck
+            MockFile(
+                os.path.normpath('some/ios/file_unittest.mm'),
+                ['TEST_F(SomeTest, Whitelist) { V(allowlist); }']),  # nocheck
+            MockFile(
+                os.path.normpath('some/doc/file.md'),
+                [
+                    '# Title',
+                    'Some markdown text includes master.',  # nocheck
+                ]),
+            MockFile(
+                os.path.normpath('some/doc/ok_file.md'),
+                [
+                    '# Title',
+                    # This link contains a '//' which the matcher thinks is a
+                    # C-style comment, and the 'master' term appears after the
+                    # '//' in the URL, so it gets ignored as a side-effect.
+                    '[Ignored](https://git/project.git/+/master/foo)',  # nocheck
+                ]),
+            MockFile(
+                os.path.normpath('some/doc/branch_name_file.md'),
+                [
+                    '# Title',
+                    # Matches appearing before `//` still trigger the check.
+                    '[src/master](https://git/p.git/+/master/foo)',  # nocheck
+                ]),
+            MockFile(
+                os.path.normpath('some/java/file/TestJavaDoc.java'),
+                [
+                    '/**',
+                    ' * This line contains the word master,',  # nocheck
+                    '* ignored because this is a comment. See {@link',
+                    ' * https://s/src/+/master:tools/README.md}',  # nocheck
+                    ' */'
+                ]),
+            MockFile(
+                os.path.normpath('some/java/file/TestJava.java'),
+                [
+                    'class TestJava {',
+                    '  public String master;',  # nocheck
+                    '}'
+                ]),
+            MockFile(
+                os.path.normpath('some/html/file.html'),
+                [
+                    '<-- an existing html multiline comment',
+                    'says "master" here',  # nocheck
+                    'in the comment -->'
+                ])
+        ]
+
+        errors = presubmit_canned_checks.CheckInclusiveLanguage(
+            input_api, MockOutputApi())
+        self.assertEqual(1, len(errors))
+        self.assertTrue(
+            os.path.normpath('some/ios/file.mm') in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('another/ios_file.mm') not in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/mac/file.mm') in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/ios/file_egtest.mm') in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/ios/file_unittest.mm') in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/doc/file.md') not in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/doc/ok_file.md') not in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/doc/branch_name_file.md') not in
+            errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/java/file/TestJavaDoc.java') not in
+            errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/java/file/TestJava.java') not in
+            errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/html/file.html') not in errors[0].message)
+
+    def testBlockedTermsWithLegacy(self):
+        input_api = MockInputApi()
+        input_api.change.RepositoryRoot = lambda: ''
+        input_api.presubmit_local_path = ''
+
+        input_api.files = [
+            MockFile(
+                os.path.normpath(
+                    'infra/inclusive_language_presubmit_exempt_dirs.txt'), [
+                        'some/ios 2 1',
+                        'some/other/dir 2 1',
+                    ]),
+            MockFile(
+                os.path.normpath('some/ios/file.mm'),
+                [
+                    'TEST(SomeClassTest, SomeInteraction, blacklist) {',  # nocheck
+                    '}'
+                ]),
+            MockFile(
+                os.path.normpath('some/ios/subdir/file.mm'),
+                [
+                    'TEST(SomeClassTest, SomeInteraction, blacklist) {',  # nocheck
+                    '}'
+                ]),
+            MockFile(
+                os.path.normpath('some/mac/file.mm'),
+                [
+                    'TEST(SomeClassTest, SomeInteraction, BlackList) {',  # nocheck
+                    '}'
+                ]),
+            MockFile(os.path.normpath('another/ios_file.mm'),
+                     ['class SomeTest : public testing::Test blocklist {};']),
+            MockFile(os.path.normpath('some/ios/file_egtest.mm'),
+                     ['- (void)testSomething { V(whitelist); }']),  # nocheck
+            MockFile(
+                os.path.normpath('some/ios/file_unittest.mm'),
+                ['TEST_F(SomeTest, Whitelist) { V(allowlist); }']),  # nocheck
+        ]
+
+        errors = presubmit_canned_checks.CheckInclusiveLanguage(
+            input_api, MockOutputApi())
+        self.assertEqual(1, len(errors))
+        self.assertTrue(
+            os.path.normpath('some/ios/file.mm') not in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/ios/subdir/file.mm') in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('another/ios_file.mm') not in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/mac/file.mm') in errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/ios/file_egtest.mm') not in
+            errors[0].message)
+        self.assertTrue(
+            os.path.normpath('some/ios/file_unittest.mm') not in
+            errors[0].message)
+
+    def testBlockedTermsWithNocheck(self):
+        input_api = MockInputApi()
+        input_api.change.RepositoryRoot = lambda: ''
+        input_api.presubmit_local_path = ''
+
+        input_api.files = [
+            MockFile(
+                os.path.normpath(
+                    'infra/inclusive_language_presubmit_exempt_dirs.txt'), [
+                        'some/dir 2 1',
+                        'some/other/dir 2 1',
+                    ]),
+            MockFile(
+                os.path.normpath('some/ios/file.mm'),
+                [
+                    'TEST(SomeClassTest, SomeInteraction, ',
+                    ' blacklist) { // nocheck',  # nocheck
+                    '}'
+                ]),
+            MockFile(
+                os.path.normpath('some/mac/file.mm'),
+                [
+                    'TEST(SomeClassTest, SomeInteraction, ',
+                    'BlackList) { // nocheck',  # nocheck
+                    '}'
+                ]),
+            MockFile(os.path.normpath('another/ios_file.mm'),
+                     ['class SomeTest : public testing::Test blocklist {};']),
+            MockFile(os.path.normpath('some/ios/file_egtest.mm'),
+                     ['- (void)testSomething { ', 'V(whitelist); } // nocheck'
+                      ]),  # nocheck
+            MockFile(
+                os.path.normpath('some/ios/file_unittest.mm'),
+                [
+                    'TEST_F(SomeTest, Whitelist) // nocheck',  # nocheck
+                    ' { V(allowlist); }'
+                ]),
+            MockFile(
+                os.path.normpath('some/doc/file.md'),
+                [
+                    'Master in markdown <!-- nocheck -->',  # nocheck
+                    '## Subheading is okay'
                 ]),
-        MockFile(
-            os.path.normpath('some/ios/file.mm'),
-            [
-                'TEST(SomeClassTest, SomeInteraction, blacklist) {',  # nocheck
-                '}'
-            ]),
-        MockFile(
-            os.path.normpath('some/mac/file.mm'),
-            [
-                'TEST(SomeClassTest, SomeInteraction, BlackList) {',  # nocheck
-                '}'
-            ]),
-        MockFile(os.path.normpath('another/ios_file.mm'),
-                 ['class SomeTest : public testing::Test blocklist {};']),
-        MockFile(os.path.normpath('some/ios/file_egtest.mm'),
-                 ['- (void)testSomething { V(whitelist); }']),  # nocheck
-        MockFile(os.path.normpath('some/ios/file_unittest.mm'),
-                 ['TEST_F(SomeTest, Whitelist) { V(allowlist); }']),  # nocheck
-        MockFile(
-            os.path.normpath('some/doc/file.md'),
-            [
-                '# Title',
-                'Some markdown text includes master.',  # nocheck
-            ]),
-        MockFile(
-            os.path.normpath('some/doc/ok_file.md'),
-            [
-                '# Title',
-                # This link contains a '//' which the matcher thinks is a
-                # C-style comment, and the 'master' term appears after the
-                # '//' in the URL, so it gets ignored as a side-effect.
-                '[Ignored](https://git/project.git/+/master/foo)',  # nocheck
-            ]),
-        MockFile(
-            os.path.normpath('some/doc/branch_name_file.md'),
-            [
-                '# Title',
-                # Matches appearing before `//` still trigger the check.
-                '[src/master](https://git/p.git/+/master/foo)',  # nocheck
-            ]),
-        MockFile(
-            os.path.normpath('some/java/file/TestJavaDoc.java'),
-            [
-                '/**',
-                ' * This line contains the word master,',  # nocheck
-                '* ignored because this is a comment. See {@link',
-                ' * https://s/src/+/master:tools/README.md}',  # nocheck
-                ' */'
-            ]),
-        MockFile(
-            os.path.normpath('some/java/file/TestJava.java'),
-            [
-                'class TestJava {',
-                '  public String master;',  # nocheck
-                '}'
-            ]),
-        MockFile(
-            os.path.normpath('some/html/file.html'),
-            [
-                '<-- an existing html multiline comment',
-                'says "master" here',  # nocheck
-                'in the comment -->'
-            ])
-    ]
-
-    errors = presubmit_canned_checks.CheckInclusiveLanguage(
-        input_api, MockOutputApi())
-    self.assertEqual(1, len(errors))
-    self.assertTrue(os.path.normpath('some/ios/file.mm') in errors[0].message)
-    self.assertTrue(
-        os.path.normpath('another/ios_file.mm') not in errors[0].message)
-    self.assertTrue(os.path.normpath('some/mac/file.mm') in errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/ios/file_egtest.mm') in errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/ios/file_unittest.mm') in errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/doc/file.md') not in errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/doc/ok_file.md') not in errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/doc/branch_name_file.md') not in
-        errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/java/file/TestJavaDoc.java') not in
-        errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/java/file/TestJava.java') not in
-        errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/html/file.html') not in errors[0].message)
-
-  def testBlockedTermsWithLegacy(self):
-    input_api = MockInputApi()
-    input_api.change.RepositoryRoot = lambda: ''
-    input_api.presubmit_local_path = ''
-
-    input_api.files = [
-        MockFile(
-            os.path.normpath(
-                'infra/inclusive_language_presubmit_exempt_dirs.txt'), [
-                    'some/ios 2 1',
-                    'some/other/dir 2 1',
+            MockFile(
+                os.path.normpath('some/java/file/TestJava.java'),
+                [
+                    'class TestJava {',
+                    '  public String master; // nocheck',  # nocheck
+                    '}'
                 ]),
-        MockFile(
-            os.path.normpath('some/ios/file.mm'),
-            [
-                'TEST(SomeClassTest, SomeInteraction, blacklist) {',  # nocheck
-                '}'
-            ]),
-        MockFile(
-            os.path.normpath('some/ios/subdir/file.mm'),
-            [
-                'TEST(SomeClassTest, SomeInteraction, blacklist) {',  # nocheck
-                '}'
-            ]),
-        MockFile(
-            os.path.normpath('some/mac/file.mm'),
-            [
-                'TEST(SomeClassTest, SomeInteraction, BlackList) {',  # nocheck
-                '}'
-            ]),
-        MockFile(os.path.normpath('another/ios_file.mm'),
-                 ['class SomeTest : public testing::Test blocklist {};']),
-        MockFile(os.path.normpath('some/ios/file_egtest.mm'),
-                 ['- (void)testSomething { V(whitelist); }']),  # nocheck
-        MockFile(os.path.normpath('some/ios/file_unittest.mm'),
-                 ['TEST_F(SomeTest, Whitelist) { V(allowlist); }']),  # nocheck
-    ]
-
-    errors = presubmit_canned_checks.CheckInclusiveLanguage(
-        input_api, MockOutputApi())
-    self.assertEqual(1, len(errors))
-    self.assertTrue(
-        os.path.normpath('some/ios/file.mm') not in errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/ios/subdir/file.mm') in errors[0].message)
-    self.assertTrue(
-        os.path.normpath('another/ios_file.mm') not in errors[0].message)
-    self.assertTrue(os.path.normpath('some/mac/file.mm') in errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/ios/file_egtest.mm') not in errors[0].message)
-    self.assertTrue(
-        os.path.normpath('some/ios/file_unittest.mm') not in errors[0].message)
-
-  def testBlockedTermsWithNocheck(self):
-    input_api = MockInputApi()
-    input_api.change.RepositoryRoot = lambda: ''
-    input_api.presubmit_local_path = ''
-
-    input_api.files = [
-        MockFile(
-            os.path.normpath(
-                'infra/inclusive_language_presubmit_exempt_dirs.txt'), [
-                    'some/dir 2 1',
-                    'some/other/dir 2 1',
+            MockFile(
+                os.path.normpath('some/html/file.html'),
+                [
+                    '<-- an existing html multiline comment',
+                    'says "master" here --><!-- nocheck -->',  # nocheck
+                    '<!-- in the comment -->'
+                ])
+        ]
+
+        errors = presubmit_canned_checks.CheckInclusiveLanguage(
+            input_api, MockOutputApi())
+        self.assertEqual(0, len(errors))
+
+    def testTopLevelDirExcempt(self):
+        input_api = MockInputApi()
+        input_api.change.RepositoryRoot = lambda: ''
+        input_api.presubmit_local_path = ''
+
+        input_api.files = [
+            MockFile(
+                os.path.normpath(
+                    'infra/inclusive_language_presubmit_exempt_dirs.txt'), [
+                        '. 2 1',
+                        'some/other/dir 2 1',
+                    ]),
+            MockFile(
+                os.path.normpath('presubmit_canned_checks_test.py'),
+                [
+                    'TEST(SomeClassTest, SomeInteraction, blacklist) {',  # nocheck
+                    '}'
                 ]),
-        MockFile(
-            os.path.normpath('some/ios/file.mm'),
-            [
-                'TEST(SomeClassTest, SomeInteraction, ',
-                ' blacklist) { // nocheck',  # nocheck
-                '}'
-            ]),
-        MockFile(
-            os.path.normpath('some/mac/file.mm'),
-            [
-                'TEST(SomeClassTest, SomeInteraction, ',
-                'BlackList) { // nocheck',  # nocheck
-                '}'
-            ]),
-        MockFile(os.path.normpath('another/ios_file.mm'),
-                 ['class SomeTest : public testing::Test blocklist {};']),
-        MockFile(os.path.normpath('some/ios/file_egtest.mm'),
-                 ['- (void)testSomething { ', 'V(whitelist); } // nocheck'
-                  ]),  # nocheck
-        MockFile(
-            os.path.normpath('some/ios/file_unittest.mm'),
-            [
-                'TEST_F(SomeTest, Whitelist) // nocheck',  # nocheck
-                ' { V(allowlist); }'
-            ]),
-        MockFile(
-            os.path.normpath('some/doc/file.md'),
-            [
-                'Master in markdown <!-- nocheck -->',  # nocheck
-                '## Subheading is okay'
-            ]),
-        MockFile(
-            os.path.normpath('some/java/file/TestJava.java'),
-            [
-                'class TestJava {',
-                '  public String master; // nocheck',  # nocheck
-                '}'
-            ]),
-        MockFile(
-            os.path.normpath('some/html/file.html'),
-            [
-                '<-- an existing html multiline comment',
-                'says "master" here --><!-- nocheck -->',  # nocheck
-                '<!-- in the comment -->'
-            ])
-    ]
-
-    errors = presubmit_canned_checks.CheckInclusiveLanguage(
-        input_api, MockOutputApi())
-    self.assertEqual(0, len(errors))
-
-  def testTopLevelDirExcempt(self):
-    input_api = MockInputApi()
-    input_api.change.RepositoryRoot = lambda: ''
-    input_api.presubmit_local_path = ''
-
-    input_api.files = [
-        MockFile(
-            os.path.normpath(
-                'infra/inclusive_language_presubmit_exempt_dirs.txt'), [
-                    '. 2 1',
-                    'some/other/dir 2 1',
+            MockFile(
+                os.path.normpath('presubmit_canned_checks.py'),
+                ['- (void)testSth { V(whitelist); } // nocheck']),  # nocheck
+        ]
+
+        errors = presubmit_canned_checks.CheckInclusiveLanguage(
+            input_api, MockOutputApi())
+        self.assertEqual(1, len(errors))
+        self.assertTrue(
+            os.path.normpath('presubmit_canned_checks_test.py') in
+            errors[0].message)
+        self.assertTrue(
+            os.path.normpath('presubmit_canned_checks.py') not in
+            errors[0].message)
+
+    def testChangeIsForSomeOtherRepo(self):
+        input_api = MockInputApi()
+        input_api.change.RepositoryRoot = lambda: 'v8'
+        input_api.presubmit_local_path = ''
+
+        input_api.files = [
+            MockFile(
+                os.path.normpath('some_file'),
+                [
+                    '# this is a blacklist',  # nocheck
                 ]),
-        MockFile(
-            os.path.normpath('presubmit_canned_checks_test.py'),
-            [
-                'TEST(SomeClassTest, SomeInteraction, blacklist) {',  # nocheck
-                '}'
-            ]),
-        MockFile(os.path.normpath('presubmit_canned_checks.py'),
-                 ['- (void)testSth { V(whitelist); } // nocheck']),  # nocheck
-    ]
-
-    errors = presubmit_canned_checks.CheckInclusiveLanguage(
-        input_api, MockOutputApi())
-    self.assertEqual(1, len(errors))
-    self.assertTrue(
-        os.path.normpath('presubmit_canned_checks_test.py') in
-        errors[0].message)
-    self.assertTrue(
-        os.path.normpath('presubmit_canned_checks.py') not in errors[0].message)
-
-  def testChangeIsForSomeOtherRepo(self):
-    input_api = MockInputApi()
-    input_api.change.RepositoryRoot = lambda: 'v8'
-    input_api.presubmit_local_path = ''
-
-    input_api.files = [
-        MockFile(
-            os.path.normpath('some_file'),
-            [
-                '# this is a blacklist',  # nocheck
-            ]),
-    ]
-    errors = presubmit_canned_checks.CheckInclusiveLanguage(
-        input_api, MockOutputApi())
-    self.assertEqual([], errors)
+        ]
+        errors = presubmit_canned_checks.CheckInclusiveLanguage(
+            input_api, MockOutputApi())
+        self.assertEqual([], errors)
 
 
 class DescriptionChecksTest(unittest.TestCase):
-  def testCheckDescriptionUsesColonInsteadOfEquals(self):
-    input_api = MockInputApi()
-    input_api.change.RepositoryRoot = lambda: ''
-    input_api.presubmit_local_path = ''
-
-    # Verify error in case of the attempt to use "Bug=".
-    input_api.change = MockChange([], 'Broken description\nBug=123')
-    errors = presubmit_canned_checks.CheckDescriptionUsesColonInsteadOfEquals(
-        input_api, MockOutputApi())
-    self.assertEqual(1, len(errors))
-    self.assertTrue('Bug=' in errors[0].message)
-
-    # Verify error in case of the attempt to use "Fixed=".
-    input_api.change = MockChange([], 'Broken description\nFixed=123')
-    errors = presubmit_canned_checks.CheckDescriptionUsesColonInsteadOfEquals(
-        input_api, MockOutputApi())
-    self.assertEqual(1, len(errors))
-    self.assertTrue('Fixed=' in errors[0].message)
-
-    # Verify error in case of the attempt to use the lower case "bug=".
-    input_api.change = MockChange([], 'Broken description lowercase\nbug=123')
-    errors = presubmit_canned_checks.CheckDescriptionUsesColonInsteadOfEquals(
-        input_api, MockOutputApi())
-    self.assertEqual(1, len(errors))
-    self.assertTrue('Bug=' in errors[0].message)
-
-    # Verify no error in case of "Bug:"
-    input_api.change = MockChange([], 'Correct description\nBug: 123')
-    errors = presubmit_canned_checks.CheckDescriptionUsesColonInsteadOfEquals(
-        input_api, MockOutputApi())
-    self.assertEqual(0, len(errors))
-
-    # Verify no error in case of "Fixed:"
-    input_api.change = MockChange([], 'Correct description\nFixed: 123')
-    errors = presubmit_canned_checks.CheckDescriptionUsesColonInsteadOfEquals(
-        input_api, MockOutputApi())
-    self.assertEqual(0, len(errors))
+    def testCheckDescriptionUsesColonInsteadOfEquals(self):
+        input_api = MockInputApi()
+        input_api.change.RepositoryRoot = lambda: ''
+        input_api.presubmit_local_path = ''
+
+        # Verify error in case of the attempt to use "Bug=".
+        input_api.change = MockChange([], 'Broken description\nBug=123')
+        errors = presubmit_canned_checks.CheckDescriptionUsesColonInsteadOfEquals(
+            input_api, MockOutputApi())
+        self.assertEqual(1, len(errors))
+        self.assertTrue('Bug=' in errors[0].message)
+
+        # Verify error in case of the attempt to use "Fixed=".
+        input_api.change = MockChange([], 'Broken description\nFixed=123')
+        errors = presubmit_canned_checks.CheckDescriptionUsesColonInsteadOfEquals(
+            input_api, MockOutputApi())
+        self.assertEqual(1, len(errors))
+        self.assertTrue('Fixed=' in errors[0].message)
+
+        # Verify error in case of the attempt to use the lower case "bug=".
+        input_api.change = MockChange([],
+                                      'Broken description lowercase\nbug=123')
+        errors = presubmit_canned_checks.CheckDescriptionUsesColonInsteadOfEquals(
+            input_api, MockOutputApi())
+        self.assertEqual(1, len(errors))
+        self.assertTrue('Bug=' in errors[0].message)
+
+        # Verify no error in case of "Bug:"
+        input_api.change = MockChange([], 'Correct description\nBug: 123')
+        errors = presubmit_canned_checks.CheckDescriptionUsesColonInsteadOfEquals(
+            input_api, MockOutputApi())
+        self.assertEqual(0, len(errors))
+
+        # Verify no error in case of "Fixed:"
+        input_api.change = MockChange([], 'Correct description\nFixed: 123')
+        errors = presubmit_canned_checks.CheckDescriptionUsesColonInsteadOfEquals(
+            input_api, MockOutputApi())
+        self.assertEqual(0, len(errors))
 
 
 class ChromiumDependencyMetadataCheckTest(unittest.TestCase):
-  def testDefaultFileFilter(self):
-    """Checks the default file filter limits the scope to Chromium dependency
+    def testDefaultFileFilter(self):
+        """Checks the default file filter limits the scope to Chromium dependency
     metadata files.
     """
-    input_api = MockInputApi()
-    input_api.change.RepositoryRoot = lambda: ''
-    input_api.files = [
-        MockFile(os.path.normpath('foo/README.md'), ['Shipped: no?']),
-        MockFile(os.path.normpath('foo/main.py'), ['Shipped: yes?']),
-    ]
-    results = presubmit_canned_checks.CheckChromiumDependencyMetadata(
-        input_api, MockOutputApi())
-    self.assertEqual(len(results), 0)
-
-  def testSkipDeletedFiles(self):
-    """Checks validation is skipped for deleted files."""
-    input_api = MockInputApi()
-    input_api.change.RepositoryRoot = lambda: ''
-    input_api.files = [
-        MockFile(os.path.normpath('foo/README.chromium'), ['No fields'],
-                 action='D'),
-    ]
-    results = presubmit_canned_checks.CheckChromiumDependencyMetadata(
-        input_api, MockOutputApi())
-    self.assertEqual(len(results), 0)
-
-  def testFeedbackForNoMetadata(self):
-    """Checks presubmit results are returned for files without any metadata."""
-    input_api = MockInputApi()
-    input_api.change.RepositoryRoot = lambda: ''
-    input_api.files = [
-        MockFile(os.path.normpath('foo/README.chromium'), ['No fields']),
-    ]
-    results = presubmit_canned_checks.CheckChromiumDependencyMetadata(
-        input_api, MockOutputApi())
-    self.assertEqual(len(results), 1)
-    self.assertTrue("No dependency metadata" in results[0].message)
-
-  def testFeedbackForInvalidMetadata(self):
-    """Checks presubmit results are returned for files with invalid metadata."""
-    input_api = MockInputApi()
-    input_api.change.RepositoryRoot = lambda: ''
-    test_file = MockFile(os.path.normpath('foo/README.chromium'),
-                         ['Shipped: yes?'])
-    input_api.files = [test_file]
-    results = presubmit_canned_checks.CheckChromiumDependencyMetadata(
-        input_api, MockOutputApi())
-
-    # There should be 10 results due to
-    # - missing 5 mandatory fields: Name, URL, Version, License, and
-    #                               Security Critical
-    # - missing 4 required fields: Date, Revision, License File, and
-    #                              License Android Compatible
-    # - Shipped should be only 'yes' or 'no'.
-    self.assertEqual(len(results), 10)
-
-    # Check each presubmit result is associated with the test file.
-    for result in results:
-      self.assertEqual(len(result.items), 1)
-      self.assertEqual(result.items[0], test_file)
+        input_api = MockInputApi()
+        input_api.change.RepositoryRoot = lambda: ''
+        input_api.files = [
+            MockFile(os.path.normpath('foo/README.md'), ['Shipped: no?']),
+            MockFile(os.path.normpath('foo/main.py'), ['Shipped: yes?']),
+        ]
+        results = presubmit_canned_checks.CheckChromiumDependencyMetadata(
+            input_api, MockOutputApi())
+        self.assertEqual(len(results), 0)
+
+    def testSkipDeletedFiles(self):
+        """Checks validation is skipped for deleted files."""
+        input_api = MockInputApi()
+        input_api.change.RepositoryRoot = lambda: ''
+        input_api.files = [
+            MockFile(os.path.normpath('foo/README.chromium'), ['No fields'],
+                     action='D'),
+        ]
+        results = presubmit_canned_checks.CheckChromiumDependencyMetadata(
+            input_api, MockOutputApi())
+        self.assertEqual(len(results), 0)
+
+    def testFeedbackForNoMetadata(self):
+        """Checks presubmit results are returned for files without any metadata."""
+        input_api = MockInputApi()
+        input_api.change.RepositoryRoot = lambda: ''
+        input_api.files = [
+            MockFile(os.path.normpath('foo/README.chromium'), ['No fields']),
+        ]
+        results = presubmit_canned_checks.CheckChromiumDependencyMetadata(
+            input_api, MockOutputApi())
+        self.assertEqual(len(results), 1)
+        self.assertTrue("No dependency metadata" in results[0].message)
+
+    def testFeedbackForInvalidMetadata(self):
+        """Checks presubmit results are returned for files with invalid metadata."""
+        input_api = MockInputApi()
+        input_api.change.RepositoryRoot = lambda: ''
+        test_file = MockFile(os.path.normpath('foo/README.chromium'),
+                             ['Shipped: yes?'])
+        input_api.files = [test_file]
+        results = presubmit_canned_checks.CheckChromiumDependencyMetadata(
+            input_api, MockOutputApi())
+
+        # There should be 10 results due to
+        # - missing 5 mandatory fields: Name, URL, Version, License, and
+        #                               Security Critical
+        # - missing 4 required fields: Date, Revision, License File, and
+        #                              License Android Compatible
+        # - Shipped should be only 'yes' or 'no'.
+        self.assertEqual(len(results), 10)
+
+        # Check each presubmit result is associated with the test file.
+        for result in results:
+            self.assertEqual(len(result.items), 1)
+            self.assertEqual(result.items[0], test_file)
 
 
 class CheckUpdateOwnersFileReferences(unittest.TestCase):
-  def testShowsWarningIfDeleting(self):
-    input_api = MockInputApi()
-    input_api.files = [
-        MockFile(os.path.normpath('foo/OWNERS'), [], [], action='D'),
-    ]
-    results = presubmit_canned_checks.CheckUpdateOwnersFileReferences(
-        input_api, MockOutputApi())
-    self.assertEqual(1, len(results))
-    self.assertEqual('warning', results[0].type)
-    self.assertEqual(1, len(results[0].items))
-
-  def testShowsWarningIfMoving(self):
-    input_api = MockInputApi()
-    input_api.files = [
-        MockFile(os.path.normpath('new_directory/OWNERS'), [], [], action='A'),
-        MockFile(os.path.normpath('old_directory/OWNERS'), [], [], action='D'),
-    ]
-    results = presubmit_canned_checks.CheckUpdateOwnersFileReferences(
-        input_api, MockOutputApi())
-    self.assertEqual(1, len(results))
-    self.assertEqual('warning', results[0].type)
-    self.assertEqual(1, len(results[0].items))
-
-  def testNoWarningIfAdding(self):
-    input_api = MockInputApi()
-    input_api.files = [
-        MockFile(os.path.normpath('foo/OWNERS'), [], [], action='A'),
-    ]
-    results = presubmit_canned_checks.CheckUpdateOwnersFileReferences(
-        input_api, MockOutputApi())
-    self.assertEqual(0, len(results))
+    def testShowsWarningIfDeleting(self):
+        input_api = MockInputApi()
+        input_api.files = [
+            MockFile(os.path.normpath('foo/OWNERS'), [], [], action='D'),
+        ]
+        results = presubmit_canned_checks.CheckUpdateOwnersFileReferences(
+            input_api, MockOutputApi())
+        self.assertEqual(1, len(results))
+        self.assertEqual('warning', results[0].type)
+        self.assertEqual(1, len(results[0].items))
+
+    def testShowsWarningIfMoving(self):
+        input_api = MockInputApi()
+        input_api.files = [
+            MockFile(os.path.normpath('new_directory/OWNERS'), [], [],
+                     action='A'),
+            MockFile(os.path.normpath('old_directory/OWNERS'), [], [],
+                     action='D'),
+        ]
+        results = presubmit_canned_checks.CheckUpdateOwnersFileReferences(
+            input_api, MockOutputApi())
+        self.assertEqual(1, len(results))
+        self.assertEqual('warning', results[0].type)
+        self.assertEqual(1, len(results[0].items))
+
+    def testNoWarningIfAdding(self):
+        input_api = MockInputApi()
+        input_api.files = [
+            MockFile(os.path.normpath('foo/OWNERS'), [], [], action='A'),
+        ]
+        results = presubmit_canned_checks.CheckUpdateOwnersFileReferences(
+            input_api, MockOutputApi())
+        self.assertEqual(0, len(results))
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

Plik diff jest za duży
+ 536 - 563
tests/presubmit_unittest.py


+ 105 - 102
tests/rdb_wrapper_test.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2020 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for rdb_wrapper.py"""
 
 from __future__ import print_function
@@ -23,114 +22,118 @@ import rdb_wrapper
 
 @contextlib.contextmanager
 def lucictx(ctx):
-  try:
-    orig = os.environ.get('LUCI_CONTEXT')
-
-    if ctx is None:
-      os.environ.pop('LUCI_CONTEXT', '')
-      yield
-    else:
-      # windows doesn't allow a file to be opened twice at the same time.
-      # therefore, this closes the temp file before yield, so that
-      # rdb_wrapper.client() can open the LUCI_CONTEXT file.
-      f = tempfile.NamedTemporaryFile(delete=False)
-      f.write(json.dumps(ctx).encode('utf-8'))
-      f.close()
-      os.environ['LUCI_CONTEXT'] = f.name
-      yield
-      os.unlink(f.name)
-
-  finally:
-    if orig is None:
-      os.environ.pop('LUCI_CONTEXT', '')
-    else:
-      os.environ['LUCI_CONTEXT'] = orig
+    try:
+        orig = os.environ.get('LUCI_CONTEXT')
+
+        if ctx is None:
+            os.environ.pop('LUCI_CONTEXT', '')
+            yield
+        else:
+            # windows doesn't allow a file to be opened twice at the same time.
+            # therefore, this closes the temp file before yield, so that
+            # rdb_wrapper.client() can open the LUCI_CONTEXT file.
+            f = tempfile.NamedTemporaryFile(delete=False)
+            f.write(json.dumps(ctx).encode('utf-8'))
+            f.close()
+            os.environ['LUCI_CONTEXT'] = f.name
+            yield
+            os.unlink(f.name)
+
+    finally:
+        if orig is None:
+            os.environ.pop('LUCI_CONTEXT', '')
+        else:
+            os.environ['LUCI_CONTEXT'] = orig
 
 
 @mock.patch.dict(os.environ, {})
 class TestClient(unittest.TestCase):
-  def test_without_lucictx(self):
-    with lucictx(None):
-      with rdb_wrapper.client("prefix") as s:
-        self.assertIsNone(s)
-
-    with lucictx({'something else': {'key': 'value'}}):
-      with rdb_wrapper.client("prefix") as s:
-        self.assertIsNone(s)
-
-  def test_with_lucictx(self):
-    with lucictx({'result_sink': {'address': '127', 'auth_token': 'secret'}}):
-      with rdb_wrapper.client("prefix") as s:
-        self.assertIsNotNone(s)
-        self.assertEqual(
-            s._url,
-            'http://127/prpc/luci.resultsink.v1.Sink/ReportTestResults',
-        )
-        self.assertDictEqual(
-            s._session.headers, {
-                'Accept': 'application/json',
-                'Authorization': 'ResultSink secret',
-                'Content-Type': 'application/json',
-            })
+    def test_without_lucictx(self):
+        with lucictx(None):
+            with rdb_wrapper.client("prefix") as s:
+                self.assertIsNone(s)
+
+        with lucictx({'something else': {'key': 'value'}}):
+            with rdb_wrapper.client("prefix") as s:
+                self.assertIsNone(s)
+
+    def test_with_lucictx(self):
+        with lucictx(
+            {'result_sink': {
+                'address': '127',
+                'auth_token': 'secret'
+            }}):
+            with rdb_wrapper.client("prefix") as s:
+                self.assertIsNotNone(s)
+                self.assertEqual(
+                    s._url,
+                    'http://127/prpc/luci.resultsink.v1.Sink/ReportTestResults',
+                )
+                self.assertDictEqual(
+                    s._session.headers, {
+                        'Accept': 'application/json',
+                        'Authorization': 'ResultSink secret',
+                        'Content-Type': 'application/json',
+                    })
 
 
 class TestResultSink(unittest.TestCase):
-  def test_report(self):
-    session = mock.MagicMock()
-    sink = rdb_wrapper.ResultSink(session, 'http://host', 'test_id_prefix/')
-    sink.report("function_foo", rdb_wrapper.STATUS_PASS, 123)
-    expected = {
-        'testId': 'test_id_prefix/function_foo',
-        'status': rdb_wrapper.STATUS_PASS,
-        'expected': True,
-        'duration': '123.000000000s',
-    }
-    session.post.assert_called_once_with(
-        'http://host',
-        json={'testResults': [expected]},
-    )
-
-  def test_report_failure_reason(self):
-    session = mock.MagicMock()
-    sink = rdb_wrapper.ResultSink(session, 'http://host', 'test_id_prefix/')
-    sink.report("function_foo", rdb_wrapper.STATUS_PASS, 123, 'Bad CL.')
-    expected = {
-        'testId': 'test_id_prefix/function_foo',
-        'status': rdb_wrapper.STATUS_PASS,
-        'expected': True,
-        'duration': '123.000000000s',
-        'failureReason': {
-            'primaryErrorMessage': 'Bad CL.',
-        },
-    }
-    session.post.assert_called_once_with(
-        'http://host',
-        json={'testResults': [expected]},
-    )
-
-  def test_report_failure_reason_truncated(self):
-    session = mock.MagicMock()
-    sink = rdb_wrapper.ResultSink(session, 'http://host', 'test_id_prefix/')
-    sink.report("function_foo", rdb_wrapper.STATUS_PASS, 123, 'X' * 1025)
-    trunc_text = rdb_wrapper._FAILURE_REASON_TRUNCATE_TEXT
-    limit = rdb_wrapper._FAILURE_REASON_LENGTH_LIMIT
-    expected_truncated_error = 'X' * (limit - len(trunc_text)) + trunc_text
-    expected = {
-        'testId': 'test_id_prefix/function_foo',
-        'status': rdb_wrapper.STATUS_PASS,
-        'expected': True,
-        'duration': '123.000000000s',
-        'failureReason': {
-            'primaryErrorMessage': expected_truncated_error,
-        },
-    }
-    session.post.assert_called_once_with(
-        'http://host',
-        json={'testResults': [expected]},
-    )
+    def test_report(self):
+        session = mock.MagicMock()
+        sink = rdb_wrapper.ResultSink(session, 'http://host', 'test_id_prefix/')
+        sink.report("function_foo", rdb_wrapper.STATUS_PASS, 123)
+        expected = {
+            'testId': 'test_id_prefix/function_foo',
+            'status': rdb_wrapper.STATUS_PASS,
+            'expected': True,
+            'duration': '123.000000000s',
+        }
+        session.post.assert_called_once_with(
+            'http://host',
+            json={'testResults': [expected]},
+        )
+
+    def test_report_failure_reason(self):
+        session = mock.MagicMock()
+        sink = rdb_wrapper.ResultSink(session, 'http://host', 'test_id_prefix/')
+        sink.report("function_foo", rdb_wrapper.STATUS_PASS, 123, 'Bad CL.')
+        expected = {
+            'testId': 'test_id_prefix/function_foo',
+            'status': rdb_wrapper.STATUS_PASS,
+            'expected': True,
+            'duration': '123.000000000s',
+            'failureReason': {
+                'primaryErrorMessage': 'Bad CL.',
+            },
+        }
+        session.post.assert_called_once_with(
+            'http://host',
+            json={'testResults': [expected]},
+        )
+
+    def test_report_failure_reason_truncated(self):
+        session = mock.MagicMock()
+        sink = rdb_wrapper.ResultSink(session, 'http://host', 'test_id_prefix/')
+        sink.report("function_foo", rdb_wrapper.STATUS_PASS, 123, 'X' * 1025)
+        trunc_text = rdb_wrapper._FAILURE_REASON_TRUNCATE_TEXT
+        limit = rdb_wrapper._FAILURE_REASON_LENGTH_LIMIT
+        expected_truncated_error = 'X' * (limit - len(trunc_text)) + trunc_text
+        expected = {
+            'testId': 'test_id_prefix/function_foo',
+            'status': rdb_wrapper.STATUS_PASS,
+            'expected': True,
+            'duration': '123.000000000s',
+            'failureReason': {
+                'primaryErrorMessage': expected_truncated_error,
+            },
+        }
+        session.post.assert_called_once_with(
+            'http://host',
+            json={'testResults': [expected]},
+        )
 
 
 if __name__ == '__main__':
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  unittest.main()
+    logging.basicConfig(
+        level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
+    unittest.main()

+ 4 - 4
tests/recipes_test.py

@@ -3,7 +3,6 @@
 # Copyright (c) 2015 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Runs simulation tests and lint on the recipes."""
 
 import os
@@ -11,10 +10,11 @@ import subprocess
 
 ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
 
+
 def recipes_py(*args):
-  subprocess.check_call([
-      os.path.join(ROOT_DIR, 'recipes', 'recipes.py')
-  ] + list(args))
+    subprocess.check_call([os.path.join(ROOT_DIR, 'recipes', 'recipes.py')] +
+                          list(args))
+
 
 recipes_py('test', 'run')
 

+ 236 - 205
tests/reclient_metrics_test.py

@@ -18,212 +18,243 @@ import reclient_metrics
 
 
 class ReclientMetricsTest(unittest.TestCase):
-  def test_is_googler(self):
-    with unittest.mock.patch('subprocess.run') as run_mock:
-      run_mock.return_value.returncode = 0
-      run_mock.return_value.stdout = 'Logged in as abc@google.com.'
-      self.assertTrue(reclient_metrics.is_googler())
-
-    with unittest.mock.patch('subprocess.run') as run_mock:
-      run_mock.return_value.returncode = 1
-      self.assertFalse(reclient_metrics.is_googler())
-
-    with unittest.mock.patch('subprocess.run') as run_mock:
-      run_mock.return_value.returncode = 0
-      run_mock.return_value.stdout = ''
-      self.assertFalse(reclient_metrics.is_googler())
-
-    with unittest.mock.patch('subprocess.run') as run_mock:
-      run_mock.return_value.returncode = 0
-      run_mock.return_value.stdout = 'Logged in as foo@example.com.'
-      self.assertFalse(reclient_metrics.is_googler())
-
-    with unittest.mock.patch('subprocess.run') as run_mock:
-      self.assertTrue(reclient_metrics.is_googler({
-          'is-googler': True,
-      }))
-      self.assertFalse(reclient_metrics.is_googler({
-          'is-googler': False,
-      }))
-      run_mock.assert_not_called()
-
-  def test_load_and_save_config(self):
-    with tempfile.TemporaryDirectory() as tmpdir:
-      reclient_metrics.CONFIG = os.path.join(tmpdir, 'reclient_metrics.cfg')
-      with unittest.mock.patch('subprocess.run') as run_mock:
-        run_mock.return_value.returncode = 0
-        run_mock.return_value.stdout = 'Logged in as abc@google.com.'
-        cfg1 = reclient_metrics.load_config()
-        self.assertDictEqual(
-            cfg1, {
-                'is-googler': True,
-                'countdown': 10,
-                'version': reclient_metrics.VERSION,
-            })
-        reclient_metrics.save_config(cfg1)
-        cfg2 = reclient_metrics.load_config()
-        self.assertDictEqual(
-            cfg2, {
+    def test_is_googler(self):
+        with unittest.mock.patch('subprocess.run') as run_mock:
+            run_mock.return_value.returncode = 0
+            run_mock.return_value.stdout = 'Logged in as abc@google.com.'
+            self.assertTrue(reclient_metrics.is_googler())
+
+        with unittest.mock.patch('subprocess.run') as run_mock:
+            run_mock.return_value.returncode = 1
+            self.assertFalse(reclient_metrics.is_googler())
+
+        with unittest.mock.patch('subprocess.run') as run_mock:
+            run_mock.return_value.returncode = 0
+            run_mock.return_value.stdout = ''
+            self.assertFalse(reclient_metrics.is_googler())
+
+        with unittest.mock.patch('subprocess.run') as run_mock:
+            run_mock.return_value.returncode = 0
+            run_mock.return_value.stdout = 'Logged in as foo@example.com.'
+            self.assertFalse(reclient_metrics.is_googler())
+
+        with unittest.mock.patch('subprocess.run') as run_mock:
+            self.assertTrue(reclient_metrics.is_googler({
                 'is-googler': True,
-                'countdown': 9,
-                'version': reclient_metrics.VERSION,
-            })
-        run_mock.assert_called_once()
-
-  def test_check_status(self):
-    with tempfile.TemporaryDirectory() as tmpdir:
-      reclient_metrics.CONFIG = os.path.join(tmpdir, 'reclient_metrics.cfg')
-      with unittest.mock.patch('subprocess.run') as run_mock:
-        run_mock.return_value.returncode = 0
-        run_mock.return_value.stdout = 'Logged in as abc@google.com.'
-        with unittest.mock.patch('sys.stdout',
-                                 new=io.StringIO()) as stdout_mock:
-          self.assertFalse(reclient_metrics.check_status("outdir"))
-          self.assertIn("Your reclient metrics will", stdout_mock.getvalue())
-          self.assertIn(
-              os.path.join("outdir", ".reproxy_tmp", "logs", "rbe_metrics.txt"),
-              stdout_mock.getvalue())
-        run_mock.assert_called_once()
-
-    with tempfile.TemporaryDirectory() as tmpdir:
-      reclient_metrics.CONFIG = os.path.join(tmpdir, 'reclient_metrics.cfg')
-      with unittest.mock.patch('subprocess.run') as run_mock:
-        run_mock.return_value.returncode = 0
-        run_mock.return_value.stdout = 'Logged in as abc@google.com.'
-        for i in range(10):
-          with unittest.mock.patch('sys.stdout',
-                                   new=io.StringIO()) as stdout_mock:
-            self.assertFalse(reclient_metrics.check_status("outdir"))
-            self.assertIn("Your reclient metrics will", stdout_mock.getvalue())
-            self.assertIn(
-                os.path.join("outdir", ".reproxy_tmp", "logs",
-                             "rbe_metrics.txt"), stdout_mock.getvalue())
-            self.assertIn("you run autoninja another %d time(s)" % (10 - i),
-                          stdout_mock.getvalue())
-        with unittest.mock.patch('sys.stdout',
-                                 new=io.StringIO()) as stdout_mock:
-          self.assertTrue(reclient_metrics.check_status("outdir"))
-          self.assertNotIn("Your reclient metrics will", stdout_mock.getvalue())
-          self.assertNotIn(
-              os.path.join("outdir", ".reproxy_tmp", "logs", "rbe_metrics.txt"),
-              stdout_mock.getvalue())
-        run_mock.assert_called_once()
-
-    with tempfile.TemporaryDirectory() as tmpdir:
-      reclient_metrics.CONFIG = os.path.join(tmpdir, 'reclient_metrics.cfg')
-      with unittest.mock.patch('subprocess.run') as run_mock:
-        run_mock.return_value.returncode = 0
-        run_mock.return_value.stdout = 'Logged in as abc@example.com.'
-        with unittest.mock.patch('sys.stdout',
-                                 new=io.StringIO()) as stdout_mock:
-          self.assertFalse(reclient_metrics.check_status("outdir"))
-          self.assertNotIn("Your reclient metrics will", stdout_mock.getvalue())
-          self.assertNotIn(
-              os.path.join("outdir", ".reproxy_tmp", "logs", "rbe_metrics.txt"),
-              stdout_mock.getvalue())
-        run_mock.assert_called_once()
-
-    with tempfile.TemporaryDirectory() as tmpdir:
-      reclient_metrics.CONFIG = os.path.join(tmpdir, 'reclient_metrics.cfg')
-      with unittest.mock.patch('subprocess.run') as run_mock:
-        run_mock.return_value.returncode = 1
-        run_mock.return_value.stdout = ''
-        with unittest.mock.patch('sys.stdout',
-                                 new=io.StringIO()) as stdout_mock:
-          self.assertFalse(reclient_metrics.check_status("outdir"))
-          self.assertNotIn("Your reclient metrics will", stdout_mock.getvalue())
-          self.assertNotIn(
-              os.path.join("outdir", ".reproxy_tmp", "logs", "rbe_metrics.txt"),
-              stdout_mock.getvalue())
-        run_mock.assert_called_once()
-
-    with tempfile.TemporaryDirectory() as tmpdir:
-      reclient_metrics.CONFIG = os.path.join(tmpdir, 'reclient_metrics.cfg')
-      with unittest.mock.patch('subprocess.run') as run_mock:
-        run_mock.return_value.returncode = 0
-        run_mock.return_value.stdout = 'Logged in as abc@google.com.'
-        reclient_metrics.main(["reclient_metrics.py", "opt-in"])
-        with unittest.mock.patch('sys.stdout',
-                                 new=io.StringIO()) as stdout_mock:
-          self.assertTrue(reclient_metrics.check_status("outdir"))
-          self.assertNotIn("Your reclient metrics will", stdout_mock.getvalue())
-          self.assertNotIn(
-              os.path.join("outdir", ".reproxy_tmp", "logs", "rbe_metrics.txt"),
-              stdout_mock.getvalue())
-        run_mock.assert_called_once()
-
-    with tempfile.TemporaryDirectory() as tmpdir:
-      reclient_metrics.CONFIG = os.path.join(tmpdir, 'reclient_metrics.cfg')
-      with unittest.mock.patch('subprocess.run') as run_mock:
-        run_mock.return_value.returncode = 0
-        run_mock.return_value.stdout = 'Logged in as abc@google.com.'
-        for i in range(3):
-          with unittest.mock.patch('sys.stdout',
-                                   new=io.StringIO()) as stdout_mock:
-            self.assertFalse(reclient_metrics.check_status("outdir"))
-            self.assertIn("Your reclient metrics will", stdout_mock.getvalue())
-            self.assertIn(
-                os.path.join("outdir", ".reproxy_tmp", "logs",
-                             "rbe_metrics.txt"), stdout_mock.getvalue())
-            self.assertIn("you run autoninja another %d time(s)" % (10 - i),
-                          stdout_mock.getvalue())
-        reclient_metrics.main(["reclient_metrics.py", "opt-in"])
-        with unittest.mock.patch('sys.stdout',
-                                 new=io.StringIO()) as stdout_mock:
-          self.assertTrue(reclient_metrics.check_status("outdir"))
-          self.assertNotIn("Your reclient metrics will", stdout_mock.getvalue())
-          self.assertNotIn(
-              os.path.join("outdir", ".reproxy_tmp", "logs", "rbe_metrics.txt"),
-              stdout_mock.getvalue())
-        run_mock.assert_called_once()
-
-    with tempfile.TemporaryDirectory() as tmpdir:
-      reclient_metrics.CONFIG = os.path.join(tmpdir, 'reclient_metrics.cfg')
-      with unittest.mock.patch('subprocess.run') as run_mock:
-        run_mock.return_value.returncode = 0
-        run_mock.return_value.stdout = 'Logged in as abc@example.com.'
-        with unittest.mock.patch('sys.stdout',
-                                 new=io.StringIO()) as stdout_mock:
-          self.assertFalse(reclient_metrics.check_status("outdir"))
-          self.assertNotIn("Your reclient metrics will", stdout_mock.getvalue())
-          self.assertNotIn(
-              os.path.join("outdir", ".reproxy_tmp", "logs", "rbe_metrics.txt"),
-              stdout_mock.getvalue())
-        reclient_metrics.main(["reclient_metrics.py", "opt-in"])
-        with unittest.mock.patch('sys.stdout',
-                                 new=io.StringIO()) as stdout_mock:
-          self.assertFalse(reclient_metrics.check_status("outdir"))
-          self.assertNotIn("Your reclient metrics will", stdout_mock.getvalue())
-          self.assertNotIn(
-              os.path.join("outdir", ".reproxy_tmp", "logs", "rbe_metrics.txt"),
-              stdout_mock.getvalue())
-        run_mock.assert_called_once()
-
-    with tempfile.TemporaryDirectory() as tmpdir:
-      reclient_metrics.CONFIG = os.path.join(tmpdir, 'reclient_metrics.cfg')
-      with unittest.mock.patch('subprocess.run') as run_mock:
-        run_mock.return_value.returncode = 0
-        run_mock.return_value.stdout = 'Logged in as abc@google.com.'
-        for i in range(3):
-          with unittest.mock.patch('sys.stdout',
-                                   new=io.StringIO()) as stdout_mock:
-            self.assertFalse(reclient_metrics.check_status("outdir"))
-            self.assertIn("Your reclient metrics will", stdout_mock.getvalue())
-            self.assertIn(
-                os.path.join("outdir", ".reproxy_tmp", "logs",
-                             "rbe_metrics.txt"), stdout_mock.getvalue())
-            self.assertIn("you run autoninja another %d time(s)" % (10 - i),
-                          stdout_mock.getvalue())
-        reclient_metrics.main(["reclient_metrics.py", "opt-out"])
-        with unittest.mock.patch('sys.stdout',
-                                 new=io.StringIO()) as stdout_mock:
-          self.assertFalse(reclient_metrics.check_status("outdir"))
-          self.assertNotIn("Your reclient metrics will", stdout_mock.getvalue())
-          self.assertNotIn(
-              os.path.join("outdir", ".reproxy_tmp", "logs", "rbe_metrics.txt"),
-              stdout_mock.getvalue())
-        run_mock.assert_called_once()
+            }))
+            self.assertFalse(
+                reclient_metrics.is_googler({
+                    'is-googler': False,
+                }))
+            run_mock.assert_not_called()
+
+    def test_load_and_save_config(self):
+        with tempfile.TemporaryDirectory() as tmpdir:
+            reclient_metrics.CONFIG = os.path.join(tmpdir,
+                                                   'reclient_metrics.cfg')
+            with unittest.mock.patch('subprocess.run') as run_mock:
+                run_mock.return_value.returncode = 0
+                run_mock.return_value.stdout = 'Logged in as abc@google.com.'
+                cfg1 = reclient_metrics.load_config()
+                self.assertDictEqual(
+                    cfg1, {
+                        'is-googler': True,
+                        'countdown': 10,
+                        'version': reclient_metrics.VERSION,
+                    })
+                reclient_metrics.save_config(cfg1)
+                cfg2 = reclient_metrics.load_config()
+                self.assertDictEqual(
+                    cfg2, {
+                        'is-googler': True,
+                        'countdown': 9,
+                        'version': reclient_metrics.VERSION,
+                    })
+                run_mock.assert_called_once()
+
+    def test_check_status(self):
+        with tempfile.TemporaryDirectory() as tmpdir:
+            reclient_metrics.CONFIG = os.path.join(tmpdir,
+                                                   'reclient_metrics.cfg')
+            with unittest.mock.patch('subprocess.run') as run_mock:
+                run_mock.return_value.returncode = 0
+                run_mock.return_value.stdout = 'Logged in as abc@google.com.'
+                with unittest.mock.patch('sys.stdout',
+                                         new=io.StringIO()) as stdout_mock:
+                    self.assertFalse(reclient_metrics.check_status("outdir"))
+                    self.assertIn("Your reclient metrics will",
+                                  stdout_mock.getvalue())
+                    self.assertIn(
+                        os.path.join("outdir", ".reproxy_tmp", "logs",
+                                     "rbe_metrics.txt"), stdout_mock.getvalue())
+                run_mock.assert_called_once()
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            reclient_metrics.CONFIG = os.path.join(tmpdir,
+                                                   'reclient_metrics.cfg')
+            with unittest.mock.patch('subprocess.run') as run_mock:
+                run_mock.return_value.returncode = 0
+                run_mock.return_value.stdout = 'Logged in as abc@google.com.'
+                for i in range(10):
+                    with unittest.mock.patch('sys.stdout',
+                                             new=io.StringIO()) as stdout_mock:
+                        self.assertFalse(
+                            reclient_metrics.check_status("outdir"))
+                        self.assertIn("Your reclient metrics will",
+                                      stdout_mock.getvalue())
+                        self.assertIn(
+                            os.path.join("outdir", ".reproxy_tmp", "logs",
+                                         "rbe_metrics.txt"),
+                            stdout_mock.getvalue())
+                        self.assertIn(
+                            "you run autoninja another %d time(s)" % (10 - i),
+                            stdout_mock.getvalue())
+                with unittest.mock.patch('sys.stdout',
+                                         new=io.StringIO()) as stdout_mock:
+                    self.assertTrue(reclient_metrics.check_status("outdir"))
+                    self.assertNotIn("Your reclient metrics will",
+                                     stdout_mock.getvalue())
+                    self.assertNotIn(
+                        os.path.join("outdir", ".reproxy_tmp", "logs",
+                                     "rbe_metrics.txt"), stdout_mock.getvalue())
+                run_mock.assert_called_once()
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            reclient_metrics.CONFIG = os.path.join(tmpdir,
+                                                   'reclient_metrics.cfg')
+            with unittest.mock.patch('subprocess.run') as run_mock:
+                run_mock.return_value.returncode = 0
+                run_mock.return_value.stdout = 'Logged in as abc@example.com.'
+                with unittest.mock.patch('sys.stdout',
+                                         new=io.StringIO()) as stdout_mock:
+                    self.assertFalse(reclient_metrics.check_status("outdir"))
+                    self.assertNotIn("Your reclient metrics will",
+                                     stdout_mock.getvalue())
+                    self.assertNotIn(
+                        os.path.join("outdir", ".reproxy_tmp", "logs",
+                                     "rbe_metrics.txt"), stdout_mock.getvalue())
+                run_mock.assert_called_once()
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            reclient_metrics.CONFIG = os.path.join(tmpdir,
+                                                   'reclient_metrics.cfg')
+            with unittest.mock.patch('subprocess.run') as run_mock:
+                run_mock.return_value.returncode = 1
+                run_mock.return_value.stdout = ''
+                with unittest.mock.patch('sys.stdout',
+                                         new=io.StringIO()) as stdout_mock:
+                    self.assertFalse(reclient_metrics.check_status("outdir"))
+                    self.assertNotIn("Your reclient metrics will",
+                                     stdout_mock.getvalue())
+                    self.assertNotIn(
+                        os.path.join("outdir", ".reproxy_tmp", "logs",
+                                     "rbe_metrics.txt"), stdout_mock.getvalue())
+                run_mock.assert_called_once()
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            reclient_metrics.CONFIG = os.path.join(tmpdir,
+                                                   'reclient_metrics.cfg')
+            with unittest.mock.patch('subprocess.run') as run_mock:
+                run_mock.return_value.returncode = 0
+                run_mock.return_value.stdout = 'Logged in as abc@google.com.'
+                reclient_metrics.main(["reclient_metrics.py", "opt-in"])
+                with unittest.mock.patch('sys.stdout',
+                                         new=io.StringIO()) as stdout_mock:
+                    self.assertTrue(reclient_metrics.check_status("outdir"))
+                    self.assertNotIn("Your reclient metrics will",
+                                     stdout_mock.getvalue())
+                    self.assertNotIn(
+                        os.path.join("outdir", ".reproxy_tmp", "logs",
+                                     "rbe_metrics.txt"), stdout_mock.getvalue())
+                run_mock.assert_called_once()
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            reclient_metrics.CONFIG = os.path.join(tmpdir,
+                                                   'reclient_metrics.cfg')
+            with unittest.mock.patch('subprocess.run') as run_mock:
+                run_mock.return_value.returncode = 0
+                run_mock.return_value.stdout = 'Logged in as abc@google.com.'
+                for i in range(3):
+                    with unittest.mock.patch('sys.stdout',
+                                             new=io.StringIO()) as stdout_mock:
+                        self.assertFalse(
+                            reclient_metrics.check_status("outdir"))
+                        self.assertIn("Your reclient metrics will",
+                                      stdout_mock.getvalue())
+                        self.assertIn(
+                            os.path.join("outdir", ".reproxy_tmp", "logs",
+                                         "rbe_metrics.txt"),
+                            stdout_mock.getvalue())
+                        self.assertIn(
+                            "you run autoninja another %d time(s)" % (10 - i),
+                            stdout_mock.getvalue())
+                reclient_metrics.main(["reclient_metrics.py", "opt-in"])
+                with unittest.mock.patch('sys.stdout',
+                                         new=io.StringIO()) as stdout_mock:
+                    self.assertTrue(reclient_metrics.check_status("outdir"))
+                    self.assertNotIn("Your reclient metrics will",
+                                     stdout_mock.getvalue())
+                    self.assertNotIn(
+                        os.path.join("outdir", ".reproxy_tmp", "logs",
+                                     "rbe_metrics.txt"), stdout_mock.getvalue())
+                run_mock.assert_called_once()
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            reclient_metrics.CONFIG = os.path.join(tmpdir,
+                                                   'reclient_metrics.cfg')
+            with unittest.mock.patch('subprocess.run') as run_mock:
+                run_mock.return_value.returncode = 0
+                run_mock.return_value.stdout = 'Logged in as abc@example.com.'
+                with unittest.mock.patch('sys.stdout',
+                                         new=io.StringIO()) as stdout_mock:
+                    self.assertFalse(reclient_metrics.check_status("outdir"))
+                    self.assertNotIn("Your reclient metrics will",
+                                     stdout_mock.getvalue())
+                    self.assertNotIn(
+                        os.path.join("outdir", ".reproxy_tmp", "logs",
+                                     "rbe_metrics.txt"), stdout_mock.getvalue())
+                reclient_metrics.main(["reclient_metrics.py", "opt-in"])
+                with unittest.mock.patch('sys.stdout',
+                                         new=io.StringIO()) as stdout_mock:
+                    self.assertFalse(reclient_metrics.check_status("outdir"))
+                    self.assertNotIn("Your reclient metrics will",
+                                     stdout_mock.getvalue())
+                    self.assertNotIn(
+                        os.path.join("outdir", ".reproxy_tmp", "logs",
+                                     "rbe_metrics.txt"), stdout_mock.getvalue())
+                run_mock.assert_called_once()
+
+        with tempfile.TemporaryDirectory() as tmpdir:
+            reclient_metrics.CONFIG = os.path.join(tmpdir,
+                                                   'reclient_metrics.cfg')
+            with unittest.mock.patch('subprocess.run') as run_mock:
+                run_mock.return_value.returncode = 0
+                run_mock.return_value.stdout = 'Logged in as abc@google.com.'
+                for i in range(3):
+                    with unittest.mock.patch('sys.stdout',
+                                             new=io.StringIO()) as stdout_mock:
+                        self.assertFalse(
+                            reclient_metrics.check_status("outdir"))
+                        self.assertIn("Your reclient metrics will",
+                                      stdout_mock.getvalue())
+                        self.assertIn(
+                            os.path.join("outdir", ".reproxy_tmp", "logs",
+                                         "rbe_metrics.txt"),
+                            stdout_mock.getvalue())
+                        self.assertIn(
+                            "you run autoninja another %d time(s)" % (10 - i),
+                            stdout_mock.getvalue())
+                reclient_metrics.main(["reclient_metrics.py", "opt-out"])
+                with unittest.mock.patch('sys.stdout',
+                                         new=io.StringIO()) as stdout_mock:
+                    self.assertFalse(reclient_metrics.check_status("outdir"))
+                    self.assertNotIn("Your reclient metrics will",
+                                     stdout_mock.getvalue())
+                    self.assertNotIn(
+                        os.path.join("outdir", ".reproxy_tmp", "logs",
+                                     "rbe_metrics.txt"), stdout_mock.getvalue())
+                run_mock.assert_called_once()
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 180 - 172
tests/roll_dep_test.py

@@ -17,188 +17,196 @@ from testing_support import fake_repos
 ROLL_DEP = os.path.join(ROOT_DIR, 'roll-dep')
 GCLIENT = os.path.join(ROOT_DIR, 'gclient')
 
+# TODO: Should fix these warnings.
+# pylint: disable=line-too-long
+
 
 class FakeRepos(fake_repos.FakeReposBase):
-  NB_GIT_REPOS = 2
-
-  def populateGit(self):
-    self._commit_git('repo_2', {
-        'origin': 'git/repo_2@1',
-    })
-    self._commit_git('repo_2', {
-        'origin': 'git/repo_2@2',
-    })
-    self._commit_git('repo_2', {
-        'origin': 'git/repo_2@3',
-    })
-
-    self._commit_git(
-        'repo_1', {
-            'DEPS': '\n'.join([
-                'deps = {',
-                ' "src/foo": "file:///%(git_base)srepo_2@%(repo_2_revision)s",',
-                '}',
-                'hooks = [',
-                '  {"action": ["foo", "--android", "{checkout_android}"]}',
-                ']',
-            ]) % {
-                'git_base': self.git_base.replace('\\', '\\\\'),
-                'repo_2_revision': self.git_hashes['repo_2'][1][0],
-            },
+    NB_GIT_REPOS = 2
+
+    def populateGit(self):
+        self._commit_git('repo_2', {
+            'origin': 'git/repo_2@1',
+        })
+        self._commit_git('repo_2', {
+            'origin': 'git/repo_2@2',
         })
+        self._commit_git('repo_2', {
+            'origin': 'git/repo_2@3',
+        })
+
+        self._commit_git(
+            'repo_1', {
+                'DEPS': '\n'.join([
+                    'deps = {',
+                    ' "src/foo": "file:///%(git_base)srepo_2@%(repo_2_revision)s",',
+                    '}',
+                    'hooks = [',
+                    '  {"action": ["foo", "--android", "{checkout_android}"]}',
+                    ']',
+                ]) % {
+                    'git_base': self.git_base.replace('\\', '\\\\'),
+                    'repo_2_revision': self.git_hashes['repo_2'][1][0],
+                },
+            })
 
 
 class RollDepTest(fake_repos.FakeReposTestBase):
-  FAKE_REPOS_CLASS = FakeRepos
-
-  def setUp(self):
-    super(RollDepTest, self).setUp()
-    # Make sure it doesn't try to auto update when testing!
-    self.env = os.environ.copy()
-    self.env['DEPOT_TOOLS_UPDATE'] = '0'
-    self.env['DEPOT_TOOLS_METRICS'] = '0'
-    # Suppress Python 3 warnings and other test undesirables.
-    self.env['GCLIENT_TEST'] = '1'
-
-    self.maxDiff = None
-
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    self.src_dir = os.path.join(self.root_dir, 'src')
-    self.foo_dir = os.path.join(self.src_dir, 'foo')
-    if self.enabled:
-      self.call([GCLIENT, 'config', self.git_base + 'repo_1', '--name', 'src'],
+    FAKE_REPOS_CLASS = FakeRepos
+
+    def setUp(self):
+        super(RollDepTest, self).setUp()
+        # Make sure it doesn't try to auto update when testing!
+        self.env = os.environ.copy()
+        self.env['DEPOT_TOOLS_UPDATE'] = '0'
+        self.env['DEPOT_TOOLS_METRICS'] = '0'
+        # Suppress Python 3 warnings and other test undesirables.
+        self.env['GCLIENT_TEST'] = '1'
+
+        self.maxDiff = None
+
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        self.src_dir = os.path.join(self.root_dir, 'src')
+        self.foo_dir = os.path.join(self.src_dir, 'foo')
+        if self.enabled:
+            self.call(
+                [GCLIENT, 'config', self.git_base + 'repo_1', '--name', 'src'],
                 cwd=self.root_dir)
-      self.call([GCLIENT, 'sync'], cwd=self.root_dir)
-
-  def call(self, cmd, cwd=None):
-    cwd = cwd or self.src_dir
-    process = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE,
-                               stderr=subprocess.PIPE, env=self.env,
-                               shell=sys.platform.startswith('win'))
-    stdout, stderr = process.communicate()
-    logging.debug("XXX: %s\n%s\nXXX" % (' '.join(cmd), stdout))
-    logging.debug("YYY: %s\n%s\nYYY" % (' '.join(cmd), stderr))
-    stdout = stdout.decode('utf-8')
-    stderr = stderr.decode('utf-8')
-    return (stdout.replace('\r\n', '\n'), stderr.replace('\r\n', '\n'),
-            process.returncode)
-
-  def testRollsDep(self):
-    if not self.enabled:
-      return
-    stdout, stderr, returncode = self.call([ROLL_DEP, 'src/foo'])
-    expected_revision = self.githash('repo_2', 3)
-
-    self.assertEqual(stderr, '')
-    self.assertEqual(returncode, 0)
-
-    with open(os.path.join(self.src_dir, 'DEPS')) as f:
-      contents = f.read()
-
-    self.assertEqual(self.gitrevparse(self.foo_dir), expected_revision)
-    self.assertEqual([
-        'deps = {',
-        ' "src/foo": "file:///' + self.git_base.replace('\\', '\\\\') +
-        'repo_2@' + expected_revision + '",',
-        '}',
-        'hooks = [',
-        '  {"action": ["foo", "--android", "{checkout_android}"]}',
-        ']',
-    ], contents.splitlines())
-
-    commit_message = self.call(['git', 'log', '-n', '1'])[0]
-
-    expected_message = 'Roll src/foo/ %s..%s (2 commits)' % (
-        self.githash('repo_2', 1)[:9], self.githash('repo_2', 3)[:9])
-
-    self.assertIn(expected_message, stdout)
-    self.assertIn(expected_message, commit_message)
-
-  def testRollsDepReviewers(self):
-    if not self.enabled:
-      return
-
-    stdout, stderr, returncode = self.call([
-        ROLL_DEP, 'src/foo', '-r', 'foo@example.com', '-r',
-        'bar@example.com,baz@example.com'
-    ])
-
-    self.assertEqual(stderr, '')
-    self.assertEqual(returncode, 0)
-
-    expected_message = 'R=foo@example.com,bar@example.com,baz@example.com'
-
-    self.assertIn(expected_message, stdout)
-
-  def testRollsDepToSpecificRevision(self):
-    if not self.enabled:
-      return
-    stdout, stderr, returncode = self.call(
-        [ROLL_DEP, 'src/foo', '--roll-to', self.githash('repo_2', 2)])
-    expected_revision = self.githash('repo_2', 2)
-
-    self.assertEqual(stderr, '')
-    self.assertEqual(returncode, 0)
-
-    with open(os.path.join(self.src_dir, 'DEPS')) as f:
-      contents = f.read()
-
-    self.assertEqual(self.gitrevparse(self.foo_dir), expected_revision)
-    self.assertEqual([
-        'deps = {',
-        ' "src/foo": "file:///' + self.git_base.replace('\\', '\\\\') +
-        'repo_2@' + expected_revision + '",',
-        '}',
-        'hooks = [',
-        '  {"action": ["foo", "--android", "{checkout_android}"]}',
-        ']',
-    ], contents.splitlines())
-
-    commit_message = self.call(['git', 'log', '-n', '1'])[0]
-
-    expected_message = 'Roll src/foo/ %s..%s (1 commit)' % (
-        self.githash('repo_2', 1)[:9], self.githash('repo_2', 2)[:9])
-
-    self.assertIn(expected_message, stdout)
-    self.assertIn(expected_message, commit_message)
-
-  def testRollsDepLogLimit(self):
-    if not self.enabled:
-      return
-    stdout, stderr, returncode = self.call(
-        [ROLL_DEP, 'src/foo', '--log-limit', '1'])
-    expected_revision = self.githash('repo_2', 3)
-
-    self.assertEqual(stderr, '')
-    self.assertEqual(returncode, 0)
-
-    with open(os.path.join(self.src_dir, 'DEPS')) as f:
-      contents = f.read()
-
-    self.assertEqual(self.gitrevparse(self.foo_dir), expected_revision)
-    self.assertEqual([
-        'deps = {',
-        ' "src/foo": "file:///' + self.git_base.replace('\\', '\\\\') +
-        'repo_2@' + expected_revision + '",',
-        '}',
-        'hooks = [',
-        '  {"action": ["foo", "--android", "{checkout_android}"]}',
-        ']',
-    ], contents.splitlines())
+            self.call([GCLIENT, 'sync'], cwd=self.root_dir)
+
+    def call(self, cmd, cwd=None):
+        cwd = cwd or self.src_dir
+        process = subprocess.Popen(cmd,
+                                   cwd=cwd,
+                                   stdout=subprocess.PIPE,
+                                   stderr=subprocess.PIPE,
+                                   env=self.env,
+                                   shell=sys.platform.startswith('win'))
+        stdout, stderr = process.communicate()
+        logging.debug("XXX: %s\n%s\nXXX" % (' '.join(cmd), stdout))
+        logging.debug("YYY: %s\n%s\nYYY" % (' '.join(cmd), stderr))
+        stdout = stdout.decode('utf-8')
+        stderr = stderr.decode('utf-8')
+        return (stdout.replace('\r\n',
+                               '\n'), stderr.replace('\r\n',
+                                                     '\n'), process.returncode)
+
+    def testRollsDep(self):
+        if not self.enabled:
+            return
+        stdout, stderr, returncode = self.call([ROLL_DEP, 'src/foo'])
+        expected_revision = self.githash('repo_2', 3)
+
+        self.assertEqual(stderr, '')
+        self.assertEqual(returncode, 0)
+
+        with open(os.path.join(self.src_dir, 'DEPS')) as f:
+            contents = f.read()
+
+        self.assertEqual(self.gitrevparse(self.foo_dir), expected_revision)
+        self.assertEqual([
+            'deps = {',
+            ' "src/foo": "file:///' + self.git_base.replace('\\', '\\\\') +
+            'repo_2@' + expected_revision + '",',
+            '}',
+            'hooks = [',
+            '  {"action": ["foo", "--android", "{checkout_android}"]}',
+            ']',
+        ], contents.splitlines())
+
+        commit_message = self.call(['git', 'log', '-n', '1'])[0]
+
+        expected_message = 'Roll src/foo/ %s..%s (2 commits)' % (self.githash(
+            'repo_2', 1)[:9], self.githash('repo_2', 3)[:9])
+
+        self.assertIn(expected_message, stdout)
+        self.assertIn(expected_message, commit_message)
+
+    def testRollsDepReviewers(self):
+        if not self.enabled:
+            return
+
+        stdout, stderr, returncode = self.call([
+            ROLL_DEP, 'src/foo', '-r', 'foo@example.com', '-r',
+            'bar@example.com,baz@example.com'
+        ])
+
+        self.assertEqual(stderr, '')
+        self.assertEqual(returncode, 0)
+
+        expected_message = 'R=foo@example.com,bar@example.com,baz@example.com'
+
+        self.assertIn(expected_message, stdout)
+
+    def testRollsDepToSpecificRevision(self):
+        if not self.enabled:
+            return
+        stdout, stderr, returncode = self.call(
+            [ROLL_DEP, 'src/foo', '--roll-to',
+             self.githash('repo_2', 2)])
+        expected_revision = self.githash('repo_2', 2)
+
+        self.assertEqual(stderr, '')
+        self.assertEqual(returncode, 0)
+
+        with open(os.path.join(self.src_dir, 'DEPS')) as f:
+            contents = f.read()
+
+        self.assertEqual(self.gitrevparse(self.foo_dir), expected_revision)
+        self.assertEqual([
+            'deps = {',
+            ' "src/foo": "file:///' + self.git_base.replace('\\', '\\\\') +
+            'repo_2@' + expected_revision + '",',
+            '}',
+            'hooks = [',
+            '  {"action": ["foo", "--android", "{checkout_android}"]}',
+            ']',
+        ], contents.splitlines())
+
+        commit_message = self.call(['git', 'log', '-n', '1'])[0]
+
+        expected_message = 'Roll src/foo/ %s..%s (1 commit)' % (self.githash(
+            'repo_2', 1)[:9], self.githash('repo_2', 2)[:9])
+
+        self.assertIn(expected_message, stdout)
+        self.assertIn(expected_message, commit_message)
+
+    def testRollsDepLogLimit(self):
+        if not self.enabled:
+            return
+        stdout, stderr, returncode = self.call(
+            [ROLL_DEP, 'src/foo', '--log-limit', '1'])
+        expected_revision = self.githash('repo_2', 3)
+
+        self.assertEqual(stderr, '')
+        self.assertEqual(returncode, 0)
+
+        with open(os.path.join(self.src_dir, 'DEPS')) as f:
+            contents = f.read()
+
+        self.assertEqual(self.gitrevparse(self.foo_dir), expected_revision)
+        self.assertEqual([
+            'deps = {',
+            ' "src/foo": "file:///' + self.git_base.replace('\\', '\\\\') +
+            'repo_2@' + expected_revision + '",',
+            '}',
+            'hooks = [',
+            '  {"action": ["foo", "--android", "{checkout_android}"]}',
+            ']',
+        ], contents.splitlines())
 
-    commit_message = self.call(['git', 'log', '-n', '1'])[0]
+        commit_message = self.call(['git', 'log', '-n', '1'])[0]
 
-    expected_message = 'Roll src/foo/ %s..%s (2 commits)' % (
-        self.githash('repo_2', 1)[:9], self.githash('repo_2', 3)[:9])
+        expected_message = 'Roll src/foo/ %s..%s (2 commits)' % (self.githash(
+            'repo_2', 1)[:9], self.githash('repo_2', 3)[:9])
 
-    self.assertIn(expected_message, stdout)
-    self.assertIn(expected_message, commit_message)
+        self.assertIn(expected_message, stdout)
+        self.assertIn(expected_message, commit_message)
 
 
 if __name__ == '__main__':
-  level = logging.DEBUG if '-v' in sys.argv else logging.FATAL
-  logging.basicConfig(
-      level=level,
-      format='%(asctime).19s %(levelname)s %(filename)s:'
-             '%(lineno)s %(message)s')
-  unittest.main()
+    level = logging.DEBUG if '-v' in sys.argv else logging.FATAL
+    logging.basicConfig(level=level,
+                        format='%(asctime).19s %(levelname)s %(filename)s:'
+                        '%(lineno)s %(message)s')
+    unittest.main()

+ 261 - 252
tests/scm_unittest.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for scm.py."""
 
 import logging
@@ -20,265 +19,275 @@ import subprocess2
 
 
 def callError(code=1, cmd='', cwd='', stdout=b'', stderr=b''):
-  return subprocess2.CalledProcessError(code, cmd, cwd, stdout, stderr)
+    return subprocess2.CalledProcessError(code, cmd, cwd, stdout, stderr)
 
 
 class GitWrapperTestCase(unittest.TestCase):
-  def setUp(self):
-    super(GitWrapperTestCase, self).setUp()
-    self.root_dir = '/foo/bar'
-
-  @mock.patch('scm.GIT.Capture')
-  def testGetEmail(self, mockCapture):
-    mockCapture.return_value = 'mini@me.com'
-    self.assertEqual(scm.GIT.GetEmail(self.root_dir), 'mini@me.com')
-    mockCapture.assert_called_with(['config', 'user.email'], cwd=self.root_dir)
-
-  @mock.patch('scm.GIT.Capture')
-  def testAssertVersion(self, mockCapture):
-    cases = [
-        ('1.7', True),
-        ('1.7.9', True),
-        ('1.7.9.foo-bar-baz', True),
-        ('1.8', True),
-        ('1.6.9', False),
-    ]
-    for expected_version, expected_ok in cases:
-      class GIT(scm.GIT):
-        pass
-      mockCapture.return_value = 'git version ' + expected_version
-      ok, version = GIT.AssertVersion('1.7')
-      self.assertEqual(expected_ok, ok)
-      self.assertEqual(expected_version, version)
-
-  def testRefToRemoteRef(self):
-    remote = 'origin'
-    refs = {
-        'refs/branch-heads/1234': ('refs/remotes/branch-heads/', '1234'),
-        # local refs for upstream branch
-        'refs/remotes/%s/foobar' % remote: ('refs/remotes/%s/' % remote,
-                                            'foobar'),
-        '%s/foobar' % remote: ('refs/remotes/%s/' % remote, 'foobar'),
-        # upstream ref for branch
-        'refs/heads/foobar': ('refs/remotes/%s/' % remote, 'foobar'),
-        # could be either local or upstream ref, assumed to refer to
-        # upstream, but probably don't want to encourage refs like this.
-        'heads/foobar': ('refs/remotes/%s/' % remote, 'foobar'),
-        # underspecified, probably intended to refer to a local branch
-        'foobar': None,
-        # tags and other refs
-        'refs/tags/TAG': None,
-        'refs/changes/34/1234': None,
-    }
-    for k, v in refs.items():
-      r = scm.GIT.RefToRemoteRef(k, remote)
-      self.assertEqual(r, v, msg='%s -> %s, expected %s' % (k, r, v))
-
-  def testRemoteRefToRef(self):
-    remote = 'origin'
-    refs = {
-        'refs/remotes/branch-heads/1234': 'refs/branch-heads/1234',
-        # local refs for upstream branch
-        'refs/remotes/origin/foobar': 'refs/heads/foobar',
-        # tags and other refs
-        'refs/tags/TAG': 'refs/tags/TAG',
-        'refs/changes/34/1234': 'refs/changes/34/1234',
-        # different remote
-        'refs/remotes/other-remote/foobar': None,
-        # underspecified, probably intended to refer to a local branch
-        'heads/foobar': None,
-        'origin/foobar': None,
-        'foobar': None,
-        None: None,
-      }
-    for k, v in refs.items():
-      r = scm.GIT.RemoteRefToRef(k, remote)
-      self.assertEqual(r, v, msg='%s -> %s, expected %s' % (k, r, v))
-
-  @mock.patch('scm.GIT.Capture')
-  @mock.patch('os.path.exists', lambda _:True)
-  def testGetRemoteHeadRefLocal(self, mockCapture):
-    mockCapture.side_effect = ['refs/remotes/origin/main']
-    self.assertEqual('refs/remotes/origin/main',
-                     scm.GIT.GetRemoteHeadRef('foo', 'proto://url', 'origin'))
-    self.assertEqual(mockCapture.call_count, 1)
-
-  @mock.patch('scm.GIT.Capture')
-  @mock.patch('os.path.exists', lambda _: True)
-  def testGetRemoteHeadRefLocalUpdateHead(self, mockCapture):
-    mockCapture.side_effect = [
-        'refs/remotes/origin/master',  # first symbolic-ref call
-        'foo',  # set-head call
-        'refs/remotes/origin/main',  # second symbolic-ref call
-    ]
-    self.assertEqual('refs/remotes/origin/main',
-                     scm.GIT.GetRemoteHeadRef('foo', 'proto://url', 'origin'))
-    self.assertEqual(mockCapture.call_count, 3)
-
-  @mock.patch('scm.GIT.Capture')
-  @mock.patch('os.path.exists', lambda _:True)
-  def testGetRemoteHeadRefRemote(self, mockCapture):
-    mockCapture.side_effect = [
-        subprocess2.CalledProcessError(1, '', '', '', ''),
-        'ref: refs/heads/main\tHEAD\n' +
-        '0000000000000000000000000000000000000000\tHEAD',
-    ]
-    self.assertEqual('refs/remotes/origin/main',
-                     scm.GIT.GetRemoteHeadRef('foo', 'proto://url', 'origin'))
-    self.assertEqual(mockCapture.call_count, 2)
+    def setUp(self):
+        super(GitWrapperTestCase, self).setUp()
+        self.root_dir = '/foo/bar'
+
+    @mock.patch('scm.GIT.Capture')
+    def testGetEmail(self, mockCapture):
+        mockCapture.return_value = 'mini@me.com'
+        self.assertEqual(scm.GIT.GetEmail(self.root_dir), 'mini@me.com')
+        mockCapture.assert_called_with(['config', 'user.email'],
+                                       cwd=self.root_dir)
+
+    @mock.patch('scm.GIT.Capture')
+    def testAssertVersion(self, mockCapture):
+        cases = [
+            ('1.7', True),
+            ('1.7.9', True),
+            ('1.7.9.foo-bar-baz', True),
+            ('1.8', True),
+            ('1.6.9', False),
+        ]
+        for expected_version, expected_ok in cases:
+
+            class GIT(scm.GIT):
+                pass
+
+            mockCapture.return_value = 'git version ' + expected_version
+            ok, version = GIT.AssertVersion('1.7')
+            self.assertEqual(expected_ok, ok)
+            self.assertEqual(expected_version, version)
+
+    def testRefToRemoteRef(self):
+        remote = 'origin'
+        refs = {
+            'refs/branch-heads/1234': ('refs/remotes/branch-heads/', '1234'),
+            # local refs for upstream branch
+            'refs/remotes/%s/foobar' % remote:
+            ('refs/remotes/%s/' % remote, 'foobar'),
+            '%s/foobar' % remote: ('refs/remotes/%s/' % remote, 'foobar'),
+            # upstream ref for branch
+            'refs/heads/foobar': ('refs/remotes/%s/' % remote, 'foobar'),
+            # could be either local or upstream ref, assumed to refer to
+            # upstream, but probably don't want to encourage refs like this.
+            'heads/foobar': ('refs/remotes/%s/' % remote, 'foobar'),
+            # underspecified, probably intended to refer to a local branch
+            'foobar':
+            None,
+            # tags and other refs
+            'refs/tags/TAG':
+            None,
+            'refs/changes/34/1234':
+            None,
+        }
+        for k, v in refs.items():
+            r = scm.GIT.RefToRemoteRef(k, remote)
+            self.assertEqual(r, v, msg='%s -> %s, expected %s' % (k, r, v))
+
+    def testRemoteRefToRef(self):
+        remote = 'origin'
+        refs = {
+            'refs/remotes/branch-heads/1234': 'refs/branch-heads/1234',
+            # local refs for upstream branch
+            'refs/remotes/origin/foobar': 'refs/heads/foobar',
+            # tags and other refs
+            'refs/tags/TAG': 'refs/tags/TAG',
+            'refs/changes/34/1234': 'refs/changes/34/1234',
+            # different remote
+            'refs/remotes/other-remote/foobar': None,
+            # underspecified, probably intended to refer to a local branch
+            'heads/foobar': None,
+            'origin/foobar': None,
+            'foobar': None,
+            None: None,
+        }
+        for k, v in refs.items():
+            r = scm.GIT.RemoteRefToRef(k, remote)
+            self.assertEqual(r, v, msg='%s -> %s, expected %s' % (k, r, v))
+
+    @mock.patch('scm.GIT.Capture')
+    @mock.patch('os.path.exists', lambda _: True)
+    def testGetRemoteHeadRefLocal(self, mockCapture):
+        mockCapture.side_effect = ['refs/remotes/origin/main']
+        self.assertEqual(
+            'refs/remotes/origin/main',
+            scm.GIT.GetRemoteHeadRef('foo', 'proto://url', 'origin'))
+        self.assertEqual(mockCapture.call_count, 1)
+
+    @mock.patch('scm.GIT.Capture')
+    @mock.patch('os.path.exists', lambda _: True)
+    def testGetRemoteHeadRefLocalUpdateHead(self, mockCapture):
+        mockCapture.side_effect = [
+            'refs/remotes/origin/master',  # first symbolic-ref call
+            'foo',  # set-head call
+            'refs/remotes/origin/main',  # second symbolic-ref call
+        ]
+        self.assertEqual(
+            'refs/remotes/origin/main',
+            scm.GIT.GetRemoteHeadRef('foo', 'proto://url', 'origin'))
+        self.assertEqual(mockCapture.call_count, 3)
+
+    @mock.patch('scm.GIT.Capture')
+    @mock.patch('os.path.exists', lambda _: True)
+    def testGetRemoteHeadRefRemote(self, mockCapture):
+        mockCapture.side_effect = [
+            subprocess2.CalledProcessError(1, '', '', '', ''),
+            'ref: refs/heads/main\tHEAD\n' +
+            '0000000000000000000000000000000000000000\tHEAD',
+        ]
+        self.assertEqual(
+            'refs/remotes/origin/main',
+            scm.GIT.GetRemoteHeadRef('foo', 'proto://url', 'origin'))
+        self.assertEqual(mockCapture.call_count, 2)
 
 
 class RealGitTest(fake_repos.FakeReposTestBase):
-  def setUp(self):
-    super(RealGitTest, self).setUp()
-    self.enabled = self.FAKE_REPOS.set_up_git()
-    if self.enabled:
-      self.cwd = scm.os.path.join(self.FAKE_REPOS.git_base, 'repo_1')
-    else:
-      self.skipTest('git fake repos not available')
-
-  def testResolveCommit(self):
-    self.assertIsNone(scm.GIT.ResolveCommit(self.cwd, 'zebra'))
-    self.assertIsNone(scm.GIT.ResolveCommit(self.cwd, 'r123456'))
-    first_rev = self.githash('repo_1', 1)
-    self.assertEqual(first_rev, scm.GIT.ResolveCommit(self.cwd, first_rev))
-    self.assertEqual(
-        self.githash('repo_1', 2), scm.GIT.ResolveCommit(self.cwd, 'HEAD'))
-
-  def testIsValidRevision(self):
-    # Sha1's are [0-9a-z]{32}, so starting with a 'z' or 'r' should always fail.
-    self.assertFalse(scm.GIT.IsValidRevision(cwd=self.cwd, rev='zebra'))
-    self.assertFalse(scm.GIT.IsValidRevision(cwd=self.cwd, rev='r123456'))
-    # Valid cases
-    first_rev = self.githash('repo_1', 1)
-    self.assertTrue(scm.GIT.IsValidRevision(cwd=self.cwd, rev=first_rev))
-    self.assertTrue(scm.GIT.IsValidRevision(cwd=self.cwd, rev='HEAD'))
-
-  def testIsAncestor(self):
-    self.assertTrue(
-        scm.GIT.IsAncestor(self.githash('repo_1', 1),
-                           self.githash('repo_1', 2),
-                           cwd=self.cwd))
-    self.assertFalse(
-        scm.GIT.IsAncestor(self.githash('repo_1', 2),
-                           self.githash('repo_1', 1),
-                           cwd=self.cwd))
-    self.assertFalse(scm.GIT.IsAncestor(self.githash('repo_1', 1), 'zebra'))
-
-  def testGetAllFiles(self):
-    self.assertEqual(['DEPS', 'foo bar', 'origin'],
-                     scm.GIT.GetAllFiles(self.cwd))
-
-  def testGetSetConfig(self):
-    key = 'scm.test-key'
-
-    self.assertIsNone(scm.GIT.GetConfig(self.cwd, key))
-    self.assertEqual(
-        'default-value', scm.GIT.GetConfig(self.cwd, key, 'default-value'))
-
-    scm.GIT.SetConfig(self.cwd, key, 'set-value')
-    self.assertEqual('set-value', scm.GIT.GetConfig(self.cwd, key))
-    self.assertEqual(
-        'set-value', scm.GIT.GetConfig(self.cwd, key, 'default-value'))
-
-    scm.GIT.SetConfig(self.cwd, key)
-    self.assertIsNone(scm.GIT.GetConfig(self.cwd, key))
-    self.assertEqual(
-        'default-value', scm.GIT.GetConfig(self.cwd, key, 'default-value'))
-
-  def testGetSetBranchConfig(self):
-    branch = scm.GIT.GetBranch(self.cwd)
-    key = 'scm.test-key'
-
-    self.assertIsNone(scm.GIT.GetBranchConfig(self.cwd, branch, key))
-    self.assertEqual(
-        'default-value',
-        scm.GIT.GetBranchConfig(self.cwd, branch, key, 'default-value'))
-
-    scm.GIT.SetBranchConfig(self.cwd, branch, key, 'set-value')
-    self.assertEqual(
-        'set-value', scm.GIT.GetBranchConfig(self.cwd, branch, key))
-    self.assertEqual(
-        'set-value',
-        scm.GIT.GetBranchConfig(self.cwd, branch, key, 'default-value'))
-    self.assertEqual(
-        'set-value',
-        scm.GIT.GetConfig(self.cwd, 'branch.%s.%s' % (branch, key)))
-
-    scm.GIT.SetBranchConfig(self.cwd, branch, key)
-    self.assertIsNone(scm.GIT.GetBranchConfig(self.cwd, branch, key))
-
-  def testFetchUpstreamTuple_NoUpstreamFound(self):
-    self.assertEqual(
-        (None, None), scm.GIT.FetchUpstreamTuple(self.cwd))
-
-  @mock.patch('scm.GIT.GetRemoteBranches', return_value=['origin/main'])
-  def testFetchUpstreamTuple_GuessOriginMaster(self, _mockGetRemoteBranches):
-    self.assertEqual(('origin', 'refs/heads/main'),
-                     scm.GIT.FetchUpstreamTuple(self.cwd))
-
-  @mock.patch('scm.GIT.GetRemoteBranches',
-              return_value=['origin/master', 'origin/main'])
-  def testFetchUpstreamTuple_GuessOriginMain(self, _mockGetRemoteBranches):
-    self.assertEqual(('origin', 'refs/heads/main'),
-                     scm.GIT.FetchUpstreamTuple(self.cwd))
-
-  def testFetchUpstreamTuple_RietveldUpstreamConfig(self):
-    scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-branch', 'rietveld-upstream')
-    scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-remote', 'rietveld-remote')
-    self.assertEqual(
-        ('rietveld-remote', 'rietveld-upstream'),
-        scm.GIT.FetchUpstreamTuple(self.cwd))
-    scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-branch')
-    scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-remote')
-
-  @mock.patch('scm.GIT.GetBranch', side_effect=callError())
-  def testFetchUpstreamTuple_NotOnBranch(self, _mockGetBranch):
-    scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-branch', 'rietveld-upstream')
-    scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-remote', 'rietveld-remote')
-    self.assertEqual(
-        ('rietveld-remote', 'rietveld-upstream'),
-        scm.GIT.FetchUpstreamTuple(self.cwd))
-    scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-branch')
-    scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-remote')
-
-  def testFetchUpstreamTuple_BranchConfig(self):
-    branch = scm.GIT.GetBranch(self.cwd)
-    scm.GIT.SetBranchConfig(self.cwd, branch, 'merge', 'branch-merge')
-    scm.GIT.SetBranchConfig(self.cwd, branch, 'remote', 'branch-remote')
-    self.assertEqual(
-        ('branch-remote', 'branch-merge'), scm.GIT.FetchUpstreamTuple(self.cwd))
-    scm.GIT.SetBranchConfig(self.cwd, branch, 'merge')
-    scm.GIT.SetBranchConfig(self.cwd, branch, 'remote')
-
-  def testFetchUpstreamTuple_AnotherBranchConfig(self):
-    branch = 'scm-test-branch'
-    scm.GIT.SetBranchConfig(self.cwd, branch, 'merge', 'other-merge')
-    scm.GIT.SetBranchConfig(self.cwd, branch, 'remote', 'other-remote')
-    self.assertEqual(
-        ('other-remote', 'other-merge'),
-        scm.GIT.FetchUpstreamTuple(self.cwd, branch))
-    scm.GIT.SetBranchConfig(self.cwd, branch, 'merge')
-    scm.GIT.SetBranchConfig(self.cwd, branch, 'remote')
-
-  def testGetBranchRef(self):
-    self.assertEqual('refs/heads/main', scm.GIT.GetBranchRef(self.cwd))
-    HEAD = scm.GIT.Capture(['rev-parse', 'HEAD'], cwd=self.cwd)
-    scm.GIT.Capture(['checkout', HEAD], cwd=self.cwd)
-    self.assertIsNone(scm.GIT.GetBranchRef(self.cwd))
-    scm.GIT.Capture(['checkout', 'main'], cwd=self.cwd)
-
-  def testGetBranch(self):
-    self.assertEqual('main', scm.GIT.GetBranch(self.cwd))
-    HEAD = scm.GIT.Capture(['rev-parse', 'HEAD'], cwd=self.cwd)
-    scm.GIT.Capture(['checkout', HEAD], cwd=self.cwd)
-    self.assertIsNone(scm.GIT.GetBranchRef(self.cwd))
-    scm.GIT.Capture(['checkout', 'main'], cwd=self.cwd)
+    def setUp(self):
+        super(RealGitTest, self).setUp()
+        self.enabled = self.FAKE_REPOS.set_up_git()
+        if self.enabled:
+            self.cwd = scm.os.path.join(self.FAKE_REPOS.git_base, 'repo_1')
+        else:
+            self.skipTest('git fake repos not available')
+
+    def testResolveCommit(self):
+        self.assertIsNone(scm.GIT.ResolveCommit(self.cwd, 'zebra'))
+        self.assertIsNone(scm.GIT.ResolveCommit(self.cwd, 'r123456'))
+        first_rev = self.githash('repo_1', 1)
+        self.assertEqual(first_rev, scm.GIT.ResolveCommit(self.cwd, first_rev))
+        self.assertEqual(self.githash('repo_1', 2),
+                         scm.GIT.ResolveCommit(self.cwd, 'HEAD'))
+
+    def testIsValidRevision(self):
+        # Sha1's are [0-9a-z]{32}, so starting with a 'z' or 'r' should always
+        # fail.
+        self.assertFalse(scm.GIT.IsValidRevision(cwd=self.cwd, rev='zebra'))
+        self.assertFalse(scm.GIT.IsValidRevision(cwd=self.cwd, rev='r123456'))
+        # Valid cases
+        first_rev = self.githash('repo_1', 1)
+        self.assertTrue(scm.GIT.IsValidRevision(cwd=self.cwd, rev=first_rev))
+        self.assertTrue(scm.GIT.IsValidRevision(cwd=self.cwd, rev='HEAD'))
+
+    def testIsAncestor(self):
+        self.assertTrue(
+            scm.GIT.IsAncestor(self.githash('repo_1', 1),
+                               self.githash('repo_1', 2),
+                               cwd=self.cwd))
+        self.assertFalse(
+            scm.GIT.IsAncestor(self.githash('repo_1', 2),
+                               self.githash('repo_1', 1),
+                               cwd=self.cwd))
+        self.assertFalse(scm.GIT.IsAncestor(self.githash('repo_1', 1), 'zebra'))
+
+    def testGetAllFiles(self):
+        self.assertEqual(['DEPS', 'foo bar', 'origin'],
+                         scm.GIT.GetAllFiles(self.cwd))
+
+    def testGetSetConfig(self):
+        key = 'scm.test-key'
+
+        self.assertIsNone(scm.GIT.GetConfig(self.cwd, key))
+        self.assertEqual('default-value',
+                         scm.GIT.GetConfig(self.cwd, key, 'default-value'))
+
+        scm.GIT.SetConfig(self.cwd, key, 'set-value')
+        self.assertEqual('set-value', scm.GIT.GetConfig(self.cwd, key))
+        self.assertEqual('set-value',
+                         scm.GIT.GetConfig(self.cwd, key, 'default-value'))
+
+        scm.GIT.SetConfig(self.cwd, key)
+        self.assertIsNone(scm.GIT.GetConfig(self.cwd, key))
+        self.assertEqual('default-value',
+                         scm.GIT.GetConfig(self.cwd, key, 'default-value'))
+
+    def testGetSetBranchConfig(self):
+        branch = scm.GIT.GetBranch(self.cwd)
+        key = 'scm.test-key'
+
+        self.assertIsNone(scm.GIT.GetBranchConfig(self.cwd, branch, key))
+        self.assertEqual(
+            'default-value',
+            scm.GIT.GetBranchConfig(self.cwd, branch, key, 'default-value'))
+
+        scm.GIT.SetBranchConfig(self.cwd, branch, key, 'set-value')
+        self.assertEqual('set-value',
+                         scm.GIT.GetBranchConfig(self.cwd, branch, key))
+        self.assertEqual(
+            'set-value',
+            scm.GIT.GetBranchConfig(self.cwd, branch, key, 'default-value'))
+        self.assertEqual(
+            'set-value',
+            scm.GIT.GetConfig(self.cwd, 'branch.%s.%s' % (branch, key)))
+
+        scm.GIT.SetBranchConfig(self.cwd, branch, key)
+        self.assertIsNone(scm.GIT.GetBranchConfig(self.cwd, branch, key))
+
+    def testFetchUpstreamTuple_NoUpstreamFound(self):
+        self.assertEqual((None, None), scm.GIT.FetchUpstreamTuple(self.cwd))
+
+    @mock.patch('scm.GIT.GetRemoteBranches', return_value=['origin/main'])
+    def testFetchUpstreamTuple_GuessOriginMaster(self, _mockGetRemoteBranches):
+        self.assertEqual(('origin', 'refs/heads/main'),
+                         scm.GIT.FetchUpstreamTuple(self.cwd))
+
+    @mock.patch('scm.GIT.GetRemoteBranches',
+                return_value=['origin/master', 'origin/main'])
+    def testFetchUpstreamTuple_GuessOriginMain(self, _mockGetRemoteBranches):
+        self.assertEqual(('origin', 'refs/heads/main'),
+                         scm.GIT.FetchUpstreamTuple(self.cwd))
+
+    def testFetchUpstreamTuple_RietveldUpstreamConfig(self):
+        scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-branch',
+                          'rietveld-upstream')
+        scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-remote',
+                          'rietveld-remote')
+        self.assertEqual(('rietveld-remote', 'rietveld-upstream'),
+                         scm.GIT.FetchUpstreamTuple(self.cwd))
+        scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-branch')
+        scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-remote')
+
+    @mock.patch('scm.GIT.GetBranch', side_effect=callError())
+    def testFetchUpstreamTuple_NotOnBranch(self, _mockGetBranch):
+        scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-branch',
+                          'rietveld-upstream')
+        scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-remote',
+                          'rietveld-remote')
+        self.assertEqual(('rietveld-remote', 'rietveld-upstream'),
+                         scm.GIT.FetchUpstreamTuple(self.cwd))
+        scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-branch')
+        scm.GIT.SetConfig(self.cwd, 'rietveld.upstream-remote')
+
+    def testFetchUpstreamTuple_BranchConfig(self):
+        branch = scm.GIT.GetBranch(self.cwd)
+        scm.GIT.SetBranchConfig(self.cwd, branch, 'merge', 'branch-merge')
+        scm.GIT.SetBranchConfig(self.cwd, branch, 'remote', 'branch-remote')
+        self.assertEqual(('branch-remote', 'branch-merge'),
+                         scm.GIT.FetchUpstreamTuple(self.cwd))
+        scm.GIT.SetBranchConfig(self.cwd, branch, 'merge')
+        scm.GIT.SetBranchConfig(self.cwd, branch, 'remote')
+
+    def testFetchUpstreamTuple_AnotherBranchConfig(self):
+        branch = 'scm-test-branch'
+        scm.GIT.SetBranchConfig(self.cwd, branch, 'merge', 'other-merge')
+        scm.GIT.SetBranchConfig(self.cwd, branch, 'remote', 'other-remote')
+        self.assertEqual(('other-remote', 'other-merge'),
+                         scm.GIT.FetchUpstreamTuple(self.cwd, branch))
+        scm.GIT.SetBranchConfig(self.cwd, branch, 'merge')
+        scm.GIT.SetBranchConfig(self.cwd, branch, 'remote')
+
+    def testGetBranchRef(self):
+        self.assertEqual('refs/heads/main', scm.GIT.GetBranchRef(self.cwd))
+        HEAD = scm.GIT.Capture(['rev-parse', 'HEAD'], cwd=self.cwd)
+        scm.GIT.Capture(['checkout', HEAD], cwd=self.cwd)
+        self.assertIsNone(scm.GIT.GetBranchRef(self.cwd))
+        scm.GIT.Capture(['checkout', 'main'], cwd=self.cwd)
+
+    def testGetBranch(self):
+        self.assertEqual('main', scm.GIT.GetBranch(self.cwd))
+        HEAD = scm.GIT.Capture(['rev-parse', 'HEAD'], cwd=self.cwd)
+        scm.GIT.Capture(['checkout', HEAD], cwd=self.cwd)
+        self.assertIsNone(scm.GIT.GetBranchRef(self.cwd))
+        scm.GIT.Capture(['checkout', 'main'], cwd=self.cwd)
 
 
 if __name__ == '__main__':
-  if '-v' in sys.argv:
-    logging.basicConfig(level=logging.DEBUG)
-  unittest.main()
+    if '-v' in sys.argv:
+        logging.basicConfig(level=logging.DEBUG)
+    unittest.main()
 
 # vim: ts=2:sw=2:tw=80:et:

+ 175 - 166
tests/split_cl_test.py

@@ -12,176 +12,185 @@ import split_cl
 
 
 class SplitClTest(unittest.TestCase):
-  def testAddUploadedByGitClSplitToDescription(self):
-    description = """Convert use of X to Y in $directory
+    def testAddUploadedByGitClSplitToDescription(self):
+        description = """Convert use of X to Y in $directory
 
 <add some background about this conversion for the reviewers>
 
 """
-    footers = 'Bug: 12345'
-
-    added_line = 'This CL was uploaded by git cl split.'
-
-    # Description without footers
-    self.assertEqual(split_cl.AddUploadedByGitClSplitToDescription(description),
-                     description + added_line)
-    # Description with footers
-    self.assertEqual(
-        split_cl.AddUploadedByGitClSplitToDescription(description + footers),
-        description + added_line + '\n\n' + footers)
-
-  def testFormatDescriptionOrComment(self):
-    description = "Converted use of X to Y in $directory."
-
-    # One directory
-    self.assertEqual(split_cl.FormatDescriptionOrComment(description, ["foo"]),
-                     "Converted use of X to Y in /foo.")
-
-    # Many directories
-    self.assertEqual(
-        split_cl.FormatDescriptionOrComment(description, ["foo", "bar"]),
-        "Converted use of X to Y in ['/foo', '/bar'].")
-
-  def GetDirectoryBaseName(self, file_path):
-    return os.path.basename(os.path.dirname(file_path))
-
-  def MockSuggestOwners(self, paths, exclude=None):
-    if not paths:
-      return ["superowner"]
-    return self.GetDirectoryBaseName(paths[0]).split(",")
-
-  def MockIsFile(self, file_path):
-    if os.path.basename(file_path) == "OWNERS":
-      return "owner" in self.GetDirectoryBaseName(file_path)
-
-    return True
-
-  @mock.patch("os.path.isfile")
-  def testSelectReviewersForFiles(self, mock_is_file):
-    mock_is_file.side_effect = self.MockIsFile
-
-    owners_client = mock.Mock(SuggestOwners=self.MockSuggestOwners,
-                              EVERYONE="*")
-    cl = mock.Mock(owners_client=owners_client)
-
-    files = [("M", os.path.join("foo", "owner1,owner2", "a.txt")),
-             ("M", os.path.join("foo", "owner1,owner2", "b.txt")),
-             ("M", os.path.join("bar", "owner1,owner2", "c.txt")),
-             ("M", os.path.join("bax", "owner2", "d.txt")),
-             ("M", os.path.join("baz", "owner3", "e.txt"))]
-
-    files_split_by_reviewers = split_cl.SelectReviewersForFiles(
-        cl, "author", files, 0)
-
-    self.assertEqual(3, len(files_split_by_reviewers.keys()))
-    info1 = files_split_by_reviewers[tuple(["owner1", "owner2"])]
-    self.assertEqual(info1.files,
-                     [("M", os.path.join("foo", "owner1,owner2", "a.txt")),
-                      ("M", os.path.join("foo", "owner1,owner2", "b.txt")),
-                      ("M", os.path.join("bar", "owner1,owner2", "c.txt"))])
-    self.assertEqual(info1.owners_directories,
-                     ["foo/owner1,owner2", "bar/owner1,owner2"])
-    info2 = files_split_by_reviewers[tuple(["owner2"])]
-    self.assertEqual(info2.files,
-                     [("M", os.path.join("bax", "owner2", "d.txt"))])
-    self.assertEqual(info2.owners_directories, ["bax/owner2"])
-    info3 = files_split_by_reviewers[tuple(["owner3"])]
-    self.assertEqual(info3.files,
-                     [("M", os.path.join("baz", "owner3", "e.txt"))])
-    self.assertEqual(info3.owners_directories, ["baz/owner3"])
-
-  class UploadClTester:
-    """Sets up test environment for testing split_cl.UploadCl()"""
-    def __init__(self, test):
-      self.mock_git_branches = self.StartPatcher("git_common.branches", test)
-      self.mock_git_branches.return_value = []
-      self.mock_git_current_branch = self.StartPatcher(
-          "git_common.current_branch", test)
-      self.mock_git_current_branch.return_value = "branch_to_upload"
-      self.mock_git_run = self.StartPatcher("git_common.run", test)
-      self.mock_temporary_file = self.StartPatcher(
-          "gclient_utils.temporary_file", test)
-      self.mock_temporary_file().__enter__.return_value = "temporary_file0"
-      self.mock_file_writer = self.StartPatcher("gclient_utils.FileWrite", test)
-
-    def StartPatcher(self, target, test):
-      patcher = mock.patch(target)
-      test.addCleanup(patcher.stop)
-      return patcher.start()
-
-    def DoUploadCl(self, directories, files, reviewers, cmd_upload):
-      split_cl.UploadCl("branch_to_upload", "upstream_branch",
-                        directories, files, "description", None, reviewers,
-                        mock.Mock(), cmd_upload, True, True, "topic",
-                        os.path.sep)
-
-  def testUploadCl(self):
-    """Tests commands run by UploadCl."""
-
-    upload_cl_tester = self.UploadClTester(self)
-
-    directories = ["dir0"]
-    files = [("M", os.path.join("bar", "a.cc")),
-             ("D", os.path.join("foo", "b.cc"))]
-    reviewers = {"reviewer1@gmail.com", "reviewer2@gmail.com"}
-    mock_cmd_upload = mock.Mock()
-    upload_cl_tester.DoUploadCl(directories, files, reviewers, mock_cmd_upload)
-
-    abs_repository_path = os.path.abspath(os.path.sep)
-    mock_git_run = upload_cl_tester.mock_git_run
-    self.assertEqual(mock_git_run.call_count, 4)
-    mock_git_run.assert_has_calls([
-        mock.call("checkout", "-t", "upstream_branch", "-b",
-                  "branch_to_upload_dir0_split"),
-        mock.call("rm", os.path.join(abs_repository_path, "foo", "b.cc")),
-        mock.call("checkout", "branch_to_upload", "--",
-                  os.path.join(abs_repository_path, "bar", "a.cc")),
-        mock.call("commit", "-F", "temporary_file0")
-    ])
-
-    expected_upload_args = [
-        "-f", "-r", "reviewer1@gmail.com,reviewer2@gmail.com", "--cq-dry-run",
-        "--send-mail", "--enable-auto-submit", "--topic=topic"
-    ]
-    mock_cmd_upload.assert_called_once_with(expected_upload_args)
-
-  def testDontUploadClIfBranchAlreadyExists(self):
-    """Tests that a CL is not uploaded if split branch already exists"""
-
-    upload_cl_tester = self.UploadClTester(self)
-    upload_cl_tester.mock_git_branches.return_value = [
-        "branch0", "branch_to_upload_dir0_split"
-    ]
-
-    directories = ["dir0"]
-    files = [("M", os.path.join("bar", "a.cc")),
-             ("D", os.path.join("foo", "b.cc"))]
-    reviewers = {"reviewer1@gmail.com"}
-    mock_cmd_upload = mock.Mock()
-    upload_cl_tester.DoUploadCl(directories, files, reviewers, mock_cmd_upload)
-
-    upload_cl_tester.mock_git_run.assert_not_called()
-    mock_cmd_upload.assert_not_called()
-
-  @mock.patch("gclient_utils.AskForData")
-  def testCheckDescriptionBugLink(self, mock_ask_for_data):
-    # Description contains bug link.
-    self.assertTrue(split_cl.CheckDescriptionBugLink("Bug:1234"))
-    self.assertEqual(mock_ask_for_data.call_count, 0)
-
-    # Description does not contain bug link. User does not enter 'y' when
-    # prompted.
-    mock_ask_for_data.reset_mock()
-    mock_ask_for_data.return_value = "m"
-    self.assertFalse(split_cl.CheckDescriptionBugLink("Description"))
-    self.assertEqual(mock_ask_for_data.call_count, 1)
-
-    # Description does not contain bug link. User enters 'y' when prompted.
-    mock_ask_for_data.reset_mock()
-    mock_ask_for_data.return_value = "y"
-    self.assertTrue(split_cl.CheckDescriptionBugLink("Description"))
-    self.assertEqual(mock_ask_for_data.call_count, 1)
+        footers = 'Bug: 12345'
+
+        added_line = 'This CL was uploaded by git cl split.'
+
+        # Description without footers
+        self.assertEqual(
+            split_cl.AddUploadedByGitClSplitToDescription(description),
+            description + added_line)
+        # Description with footers
+        self.assertEqual(
+            split_cl.AddUploadedByGitClSplitToDescription(description +
+                                                          footers),
+            description + added_line + '\n\n' + footers)
+
+    def testFormatDescriptionOrComment(self):
+        description = "Converted use of X to Y in $directory."
+
+        # One directory
+        self.assertEqual(
+            split_cl.FormatDescriptionOrComment(description, ["foo"]),
+            "Converted use of X to Y in /foo.")
+
+        # Many directories
+        self.assertEqual(
+            split_cl.FormatDescriptionOrComment(description, ["foo", "bar"]),
+            "Converted use of X to Y in ['/foo', '/bar'].")
+
+    def GetDirectoryBaseName(self, file_path):
+        return os.path.basename(os.path.dirname(file_path))
+
+    def MockSuggestOwners(self, paths, exclude=None):
+        if not paths:
+            return ["superowner"]
+        return self.GetDirectoryBaseName(paths[0]).split(",")
+
+    def MockIsFile(self, file_path):
+        if os.path.basename(file_path) == "OWNERS":
+            return "owner" in self.GetDirectoryBaseName(file_path)
+
+        return True
+
+    @mock.patch("os.path.isfile")
+    def testSelectReviewersForFiles(self, mock_is_file):
+        mock_is_file.side_effect = self.MockIsFile
+
+        owners_client = mock.Mock(SuggestOwners=self.MockSuggestOwners,
+                                  EVERYONE="*")
+        cl = mock.Mock(owners_client=owners_client)
+
+        files = [("M", os.path.join("foo", "owner1,owner2", "a.txt")),
+                 ("M", os.path.join("foo", "owner1,owner2", "b.txt")),
+                 ("M", os.path.join("bar", "owner1,owner2", "c.txt")),
+                 ("M", os.path.join("bax", "owner2", "d.txt")),
+                 ("M", os.path.join("baz", "owner3", "e.txt"))]
+
+        files_split_by_reviewers = split_cl.SelectReviewersForFiles(
+            cl, "author", files, 0)
+
+        self.assertEqual(3, len(files_split_by_reviewers.keys()))
+        info1 = files_split_by_reviewers[tuple(["owner1", "owner2"])]
+        self.assertEqual(info1.files,
+                         [("M", os.path.join("foo", "owner1,owner2", "a.txt")),
+                          ("M", os.path.join("foo", "owner1,owner2", "b.txt")),
+                          ("M", os.path.join("bar", "owner1,owner2", "c.txt"))])
+        self.assertEqual(info1.owners_directories,
+                         ["foo/owner1,owner2", "bar/owner1,owner2"])
+        info2 = files_split_by_reviewers[tuple(["owner2"])]
+        self.assertEqual(info2.files,
+                         [("M", os.path.join("bax", "owner2", "d.txt"))])
+        self.assertEqual(info2.owners_directories, ["bax/owner2"])
+        info3 = files_split_by_reviewers[tuple(["owner3"])]
+        self.assertEqual(info3.files,
+                         [("M", os.path.join("baz", "owner3", "e.txt"))])
+        self.assertEqual(info3.owners_directories, ["baz/owner3"])
+
+    class UploadClTester:
+        """Sets up test environment for testing split_cl.UploadCl()"""
+        def __init__(self, test):
+            self.mock_git_branches = self.StartPatcher("git_common.branches",
+                                                       test)
+            self.mock_git_branches.return_value = []
+            self.mock_git_current_branch = self.StartPatcher(
+                "git_common.current_branch", test)
+            self.mock_git_current_branch.return_value = "branch_to_upload"
+            self.mock_git_run = self.StartPatcher("git_common.run", test)
+            self.mock_temporary_file = self.StartPatcher(
+                "gclient_utils.temporary_file", test)
+            self.mock_temporary_file(
+            ).__enter__.return_value = "temporary_file0"
+            self.mock_file_writer = self.StartPatcher("gclient_utils.FileWrite",
+                                                      test)
+
+        def StartPatcher(self, target, test):
+            patcher = mock.patch(target)
+            test.addCleanup(patcher.stop)
+            return patcher.start()
+
+        def DoUploadCl(self, directories, files, reviewers, cmd_upload):
+            split_cl.UploadCl("branch_to_upload", "upstream_branch",
+                              directories, files, "description", None,
+                              reviewers, mock.Mock(), cmd_upload, True, True,
+                              "topic", os.path.sep)
+
+    def testUploadCl(self):
+        """Tests commands run by UploadCl."""
+
+        upload_cl_tester = self.UploadClTester(self)
+
+        directories = ["dir0"]
+        files = [("M", os.path.join("bar", "a.cc")),
+                 ("D", os.path.join("foo", "b.cc"))]
+        reviewers = {"reviewer1@gmail.com", "reviewer2@gmail.com"}
+        mock_cmd_upload = mock.Mock()
+        upload_cl_tester.DoUploadCl(directories, files, reviewers,
+                                    mock_cmd_upload)
+
+        abs_repository_path = os.path.abspath(os.path.sep)
+        mock_git_run = upload_cl_tester.mock_git_run
+        self.assertEqual(mock_git_run.call_count, 4)
+        mock_git_run.assert_has_calls([
+            mock.call("checkout", "-t", "upstream_branch", "-b",
+                      "branch_to_upload_dir0_split"),
+            mock.call("rm", os.path.join(abs_repository_path, "foo", "b.cc")),
+            mock.call("checkout", "branch_to_upload", "--",
+                      os.path.join(abs_repository_path, "bar", "a.cc")),
+            mock.call("commit", "-F", "temporary_file0")
+        ])
+
+        expected_upload_args = [
+            "-f", "-r", "reviewer1@gmail.com,reviewer2@gmail.com",
+            "--cq-dry-run", "--send-mail", "--enable-auto-submit",
+            "--topic=topic"
+        ]
+        mock_cmd_upload.assert_called_once_with(expected_upload_args)
+
+    def testDontUploadClIfBranchAlreadyExists(self):
+        """Tests that a CL is not uploaded if split branch already exists"""
+
+        upload_cl_tester = self.UploadClTester(self)
+        upload_cl_tester.mock_git_branches.return_value = [
+            "branch0", "branch_to_upload_dir0_split"
+        ]
+
+        directories = ["dir0"]
+        files = [("M", os.path.join("bar", "a.cc")),
+                 ("D", os.path.join("foo", "b.cc"))]
+        reviewers = {"reviewer1@gmail.com"}
+        mock_cmd_upload = mock.Mock()
+        upload_cl_tester.DoUploadCl(directories, files, reviewers,
+                                    mock_cmd_upload)
+
+        upload_cl_tester.mock_git_run.assert_not_called()
+        mock_cmd_upload.assert_not_called()
+
+    @mock.patch("gclient_utils.AskForData")
+    def testCheckDescriptionBugLink(self, mock_ask_for_data):
+        # Description contains bug link.
+        self.assertTrue(split_cl.CheckDescriptionBugLink("Bug:1234"))
+        self.assertEqual(mock_ask_for_data.call_count, 0)
+
+        # Description does not contain bug link. User does not enter 'y' when
+        # prompted.
+        mock_ask_for_data.reset_mock()
+        mock_ask_for_data.return_value = "m"
+        self.assertFalse(split_cl.CheckDescriptionBugLink("Description"))
+        self.assertEqual(mock_ask_for_data.call_count, 1)
+
+        # Description does not contain bug link. User enters 'y' when prompted.
+        mock_ask_for_data.reset_mock()
+        mock_ask_for_data.return_value = "y"
+        self.assertTrue(split_cl.CheckDescriptionBugLink("Description"))
+        self.assertEqual(mock_ask_for_data.call_count, 1)
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 131 - 137
tests/subcommand_test.py

@@ -15,143 +15,137 @@ import subcommand
 
 
 class SubcommandTest(unittest.TestCase):
-  def setUp(self):
-    super(SubcommandTest, self).setUp()
-    self.module = mock.Mock(__doc__='Module documentation')
-    self.parser = mock.Mock()
-    self.sc = subcommand.CommandDispatcher(__name__)
-    self.sc.module = self.module
-
-  def testEnumerateCommands(self):
-    self.module.CMDfoo_bar = object()
-    self.module.CMDbaz = object()
-    self.module.CMDaBcDeF_0123 = object()
-
-    expected = {
-      'foo-bar': self.module.CMDfoo_bar,
-      'baz': self.module.CMDbaz,
-      'aBcDeF-0123': self.module.CMDaBcDeF_0123,
-      'help': subcommand.CMDhelp,
-    }
-    self.assertEqual(expected, self.sc.enumerate_commands())
-
-  def testEnumerateCommands_CustomHelp(self):
-    self.module.CMDhelp = object()
-    self.assertEqual(
-        {'help': self.module.CMDhelp}, self.sc.enumerate_commands())
-
-  def testFindNearestCommand_ExactMatch(self):
-    self.module.CMDfoo = object()
-    self.assertEqual(
-        self.module.CMDfoo, self.sc.find_nearest_command('foo'))
-
-  def testFindNearestCommand_UniquePrefix(self):
-    self.module.CMDfoo = object()
-    self.module.CMDbar = object()
-    self.module.CMDunique_prefix = object()
-    self.assertEqual(
-        self.module.CMDunique_prefix,
-        self.sc.find_nearest_command('unique-pre'))
-
-  def testFindNearestCommand_NonUniquePrefix(self):
-    self.module.CMDprefix1 = object()
-    self.module.CMDprefix2 = object()
-    self.assertIsNone(self.sc.find_nearest_command('prefix'))
-
-  def testFindNearestCommand_CloseEnough(self):
-    self.module.CMDfoo = object()
-    self.module.CMDbar = object()
-    self.module.CMDclose_enough = object()
-    self.assertEqual(
-        self.module.CMDclose_enough,
-        self.sc.find_nearest_command('clos-enough'))
-
-  def testFindNearestCommand_TooManyCloseEnough(self):
-    self.module.CMDcase_enough = object()
-    self.module.CMDclose_enough = object()
-    self.assertIsNone(self.sc.find_nearest_command('clase-enough'))
-
-  def testFindNearestCommand_ClosestIsNotCloseEnough(self):
-    self.module.CMDfoo = object()
-    self.module.CMDbar = object()
-    self.module.CMDnot_close_enough = object()
-    self.assertIsNone(self.sc.find_nearest_command('clos-enof'))
-
-  def _setUpTestCommands(self):
-    self.module.CMDhelp = mock.Mock(
-        __name__='CMDhelp',
-        __doc__='help documentation',
-        usage_more=None,
-        return_value=0)
-    self.module.CMDfoo = mock.Mock(
-        __name__='CMDfoo',
-        __doc__='foo documentation',
-        usage_more='usage more',
-        epilog='epilog',
-        return_value=123)
-    self.module.CMDbar_baz = mock.Mock(
-        __name__='CMDbar_baz',
-        __doc__='bar-baz documentation',
-        usage_more=None,
-        epilog=None,
-        return_value=0)
-
-  def testExecute(self):
-    self._setUpTestCommands()
-
-    self.assertEqual(
-        123, self.sc.execute(self.parser, ['foo', '--bar', '--baz']))
-    self.module.CMDfoo.assert_called_once_with(self.parser, ['--bar', '--baz'])
-    self.assertEqual('foo documentation\n\n', self.parser.description)
-    self.assertEqual('\nepilog\n', self.parser.epilog)
-    self.parser.set_usage.assert_called_once_with(
-        'usage: %prog foo [options] usage more')
-
-  def testExecute_Help(self):
-    self._setUpTestCommands()
-
-    self.assertEqual(0, self.sc.execute(self.parser, ['--help']))
-    self.module.CMDhelp.assert_called_once_with(self.parser, [])
-    self.assertEqual(
-        'Module documentation\n\n'
-        'Commands are:\n'
-        '  bar-baz bar-baz documentation\n'
-        '  foo     foo documentation\n'
-        '  help    help documentation\n',
-        self.parser.description)
-    self.parser.set_usage.assert_called_once_with(
-        'usage: %prog <command> [options]')
-
-  def testExecute_CommandHelp(self):
-    self._setUpTestCommands()
-
-    self.assertEqual(0, self.sc.execute(self.parser, ['help', 'bar-baz']))
-    self.module.CMDbar_baz.assert_called_once_with(self.parser, ['--help'])
-    self.assertEqual('bar-baz documentation\n\n', self.parser.description)
-    self.parser.set_usage.assert_called_once_with(
-        'usage: %prog bar-baz [options]')
-
-  def testExecute_CommandNotFound(self):
-    self._setUpTestCommands()
-
-    self.assertEqual(0, self.sc.execute(self.parser, ['not-found']))
-    self.module.CMDhelp.assert_called_once_with(self.parser, [])
-    self.assertEqual(
-        'Module documentation\n\n'
-        'Commands are:\n'
-        '  bar-baz bar-baz documentation\n'
-        '  foo     foo documentation\n'
-        '  help    help documentation\n',
-        self.parser.description)
-    self.parser.set_usage.assert_called_once_with(
-        'usage: %prog <command> [options]')
-
-  def testExecute_CommandNotFoundAndHelpDisabled(self):
-    self._setUpTestCommands()
-    self.module.CMDhelp = None
-
-    self.assertEqual(2, self.sc.execute(self.parser, ['not-found']))
+    def setUp(self):
+        super(SubcommandTest, self).setUp()
+        self.module = mock.Mock(__doc__='Module documentation')
+        self.parser = mock.Mock()
+        self.sc = subcommand.CommandDispatcher(__name__)
+        self.sc.module = self.module
+
+    def testEnumerateCommands(self):
+        self.module.CMDfoo_bar = object()
+        self.module.CMDbaz = object()
+        self.module.CMDaBcDeF_0123 = object()
+
+        expected = {
+            'foo-bar': self.module.CMDfoo_bar,
+            'baz': self.module.CMDbaz,
+            'aBcDeF-0123': self.module.CMDaBcDeF_0123,
+            'help': subcommand.CMDhelp,
+        }
+        self.assertEqual(expected, self.sc.enumerate_commands())
+
+    def testEnumerateCommands_CustomHelp(self):
+        self.module.CMDhelp = object()
+        self.assertEqual({'help': self.module.CMDhelp},
+                         self.sc.enumerate_commands())
+
+    def testFindNearestCommand_ExactMatch(self):
+        self.module.CMDfoo = object()
+        self.assertEqual(self.module.CMDfoo,
+                         self.sc.find_nearest_command('foo'))
+
+    def testFindNearestCommand_UniquePrefix(self):
+        self.module.CMDfoo = object()
+        self.module.CMDbar = object()
+        self.module.CMDunique_prefix = object()
+        self.assertEqual(self.module.CMDunique_prefix,
+                         self.sc.find_nearest_command('unique-pre'))
+
+    def testFindNearestCommand_NonUniquePrefix(self):
+        self.module.CMDprefix1 = object()
+        self.module.CMDprefix2 = object()
+        self.assertIsNone(self.sc.find_nearest_command('prefix'))
+
+    def testFindNearestCommand_CloseEnough(self):
+        self.module.CMDfoo = object()
+        self.module.CMDbar = object()
+        self.module.CMDclose_enough = object()
+        self.assertEqual(self.module.CMDclose_enough,
+                         self.sc.find_nearest_command('clos-enough'))
+
+    def testFindNearestCommand_TooManyCloseEnough(self):
+        self.module.CMDcase_enough = object()
+        self.module.CMDclose_enough = object()
+        self.assertIsNone(self.sc.find_nearest_command('clase-enough'))
+
+    def testFindNearestCommand_ClosestIsNotCloseEnough(self):
+        self.module.CMDfoo = object()
+        self.module.CMDbar = object()
+        self.module.CMDnot_close_enough = object()
+        self.assertIsNone(self.sc.find_nearest_command('clos-enof'))
+
+    def _setUpTestCommands(self):
+        self.module.CMDhelp = mock.Mock(__name__='CMDhelp',
+                                        __doc__='help documentation',
+                                        usage_more=None,
+                                        return_value=0)
+        self.module.CMDfoo = mock.Mock(__name__='CMDfoo',
+                                       __doc__='foo documentation',
+                                       usage_more='usage more',
+                                       epilog='epilog',
+                                       return_value=123)
+        self.module.CMDbar_baz = mock.Mock(__name__='CMDbar_baz',
+                                           __doc__='bar-baz documentation',
+                                           usage_more=None,
+                                           epilog=None,
+                                           return_value=0)
+
+    def testExecute(self):
+        self._setUpTestCommands()
+
+        self.assertEqual(
+            123, self.sc.execute(self.parser, ['foo', '--bar', '--baz']))
+        self.module.CMDfoo.assert_called_once_with(self.parser,
+                                                   ['--bar', '--baz'])
+        self.assertEqual('foo documentation\n\n', self.parser.description)
+        self.assertEqual('\nepilog\n', self.parser.epilog)
+        self.parser.set_usage.assert_called_once_with(
+            'usage: %prog foo [options] usage more')
+
+    def testExecute_Help(self):
+        self._setUpTestCommands()
+
+        self.assertEqual(0, self.sc.execute(self.parser, ['--help']))
+        self.module.CMDhelp.assert_called_once_with(self.parser, [])
+        self.assertEqual(
+            'Module documentation\n\n'
+            'Commands are:\n'
+            '  bar-baz bar-baz documentation\n'
+            '  foo     foo documentation\n'
+            '  help    help documentation\n', self.parser.description)
+        self.parser.set_usage.assert_called_once_with(
+            'usage: %prog <command> [options]')
+
+    def testExecute_CommandHelp(self):
+        self._setUpTestCommands()
+
+        self.assertEqual(0, self.sc.execute(self.parser, ['help', 'bar-baz']))
+        self.module.CMDbar_baz.assert_called_once_with(self.parser, ['--help'])
+        self.assertEqual('bar-baz documentation\n\n', self.parser.description)
+        self.parser.set_usage.assert_called_once_with(
+            'usage: %prog bar-baz [options]')
+
+    def testExecute_CommandNotFound(self):
+        self._setUpTestCommands()
+
+        self.assertEqual(0, self.sc.execute(self.parser, ['not-found']))
+        self.module.CMDhelp.assert_called_once_with(self.parser, [])
+        self.assertEqual(
+            'Module documentation\n\n'
+            'Commands are:\n'
+            '  bar-baz bar-baz documentation\n'
+            '  foo     foo documentation\n'
+            '  help    help documentation\n', self.parser.description)
+        self.parser.set_usage.assert_called_once_with(
+            'usage: %prog <command> [options]')
+
+    def testExecute_CommandNotFoundAndHelpDisabled(self):
+        self._setUpTestCommands()
+        self.module.CMDhelp = None
+
+        self.assertEqual(2, self.sc.execute(self.parser, ['not-found']))
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 243 - 237
tests/subprocess2_test.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for subprocess2.py."""
 
 import os
@@ -25,252 +24,259 @@ TEST_COMMAND = [
 
 
 class DefaultsTest(unittest.TestCase):
-  @mock.patch('subprocess2.communicate')
-  def test_check_call_defaults(self, mockCommunicate):
-    mockCommunicate.return_value = (('stdout', 'stderr'), 0)
-    self.assertEqual(
-        ('stdout', 'stderr'), subprocess2.check_call_out(['foo'], a=True))
-    mockCommunicate.assert_called_with(['foo'], a=True)
-
-  @mock.patch('subprocess2.communicate')
-  def test_capture_defaults(self, mockCommunicate):
-    mockCommunicate.return_value = (('stdout', 'stderr'), 0)
-    self.assertEqual(
-        'stdout', subprocess2.capture(['foo'], a=True))
-    mockCommunicate.assert_called_with(
-        ['foo'], a=True, stdin=subprocess2.DEVNULL, stdout=subprocess2.PIPE)
-
-  @mock.patch('subprocess2.Popen')
-  def test_communicate_defaults(self, mockPopen):
-    mockPopen().communicate.return_value = ('bar', 'baz')
-    mockPopen().returncode = -8
-    self.assertEqual(
-        (('bar', 'baz'), -8), subprocess2.communicate(['foo'], a=True))
-    mockPopen.assert_called_with(['foo'], a=True)
-
-  @mock.patch('os.environ', {})
-  @mock.patch('subprocess.Popen.__init__')
-  def test_Popen_defaults(self, mockPopen):
-    with mock.patch('sys.platform', 'win32'):
-      subprocess2.Popen(['foo'], a=True)
-      mockPopen.assert_called_with(['foo'], a=True, shell=True)
-
-    with mock.patch('sys.platform', 'non-win32'):
-      subprocess2.Popen(['foo'], a=True)
-      mockPopen.assert_called_with(['foo'], a=True, shell=False)
-
-  def test_get_english_env(self):
-    with mock.patch('sys.platform', 'win32'):
-      self.assertIsNone(subprocess2.get_english_env({}))
-
-    with mock.patch('sys.platform', 'non-win32'):
-      self.assertIsNone(subprocess2.get_english_env({}))
-      self.assertIsNone(
-          subprocess2.get_english_env({'LANG': 'en_XX', 'LANGUAGE': 'en_YY'}))
-      self.assertEqual(
-          {'LANG': 'en_US.UTF-8', 'LANGUAGE': 'en_US.UTF-8'},
-          subprocess2.get_english_env({'LANG': 'bar', 'LANGUAGE': 'baz'}))
-
-  @mock.patch('subprocess2.communicate')
-  def test_check_output_defaults(self, mockCommunicate):
-    mockCommunicate.return_value = (('stdout', 'stderr'), 0)
-    self.assertEqual('stdout', subprocess2.check_output(['foo'], a=True))
-    mockCommunicate.assert_called_with(
-        ['foo'], a=True, stdin=subprocess2.DEVNULL, stdout=subprocess2.PIPE)
-
-  @mock.patch('subprocess.Popen.__init__')
-  def test_env_type(self, mockPopen):
-    subprocess2.Popen(['foo'], env={b'key': b'value'})
-    mockPopen.assert_called_with(['foo'], env={'key': 'value'}, shell=mock.ANY)
+    @mock.patch('subprocess2.communicate')
+    def test_check_call_defaults(self, mockCommunicate):
+        mockCommunicate.return_value = (('stdout', 'stderr'), 0)
+        self.assertEqual(('stdout', 'stderr'),
+                         subprocess2.check_call_out(['foo'], a=True))
+        mockCommunicate.assert_called_with(['foo'], a=True)
+
+    @mock.patch('subprocess2.communicate')
+    def test_capture_defaults(self, mockCommunicate):
+        mockCommunicate.return_value = (('stdout', 'stderr'), 0)
+        self.assertEqual('stdout', subprocess2.capture(['foo'], a=True))
+        mockCommunicate.assert_called_with(['foo'],
+                                           a=True,
+                                           stdin=subprocess2.DEVNULL,
+                                           stdout=subprocess2.PIPE)
+
+    @mock.patch('subprocess2.Popen')
+    def test_communicate_defaults(self, mockPopen):
+        mockPopen().communicate.return_value = ('bar', 'baz')
+        mockPopen().returncode = -8
+        self.assertEqual((('bar', 'baz'), -8),
+                         subprocess2.communicate(['foo'], a=True))
+        mockPopen.assert_called_with(['foo'], a=True)
+
+    @mock.patch('os.environ', {})
+    @mock.patch('subprocess.Popen.__init__')
+    def test_Popen_defaults(self, mockPopen):
+        with mock.patch('sys.platform', 'win32'):
+            subprocess2.Popen(['foo'], a=True)
+            mockPopen.assert_called_with(['foo'], a=True, shell=True)
+
+        with mock.patch('sys.platform', 'non-win32'):
+            subprocess2.Popen(['foo'], a=True)
+            mockPopen.assert_called_with(['foo'], a=True, shell=False)
+
+    def test_get_english_env(self):
+        with mock.patch('sys.platform', 'win32'):
+            self.assertIsNone(subprocess2.get_english_env({}))
+
+        with mock.patch('sys.platform', 'non-win32'):
+            self.assertIsNone(subprocess2.get_english_env({}))
+            self.assertIsNone(
+                subprocess2.get_english_env({
+                    'LANG': 'en_XX',
+                    'LANGUAGE': 'en_YY'
+                }))
+            self.assertEqual({
+                'LANG': 'en_US.UTF-8',
+                'LANGUAGE': 'en_US.UTF-8'
+            }, subprocess2.get_english_env({
+                'LANG': 'bar',
+                'LANGUAGE': 'baz'
+            }))
+
+    @mock.patch('subprocess2.communicate')
+    def test_check_output_defaults(self, mockCommunicate):
+        mockCommunicate.return_value = (('stdout', 'stderr'), 0)
+        self.assertEqual('stdout', subprocess2.check_output(['foo'], a=True))
+        mockCommunicate.assert_called_with(['foo'],
+                                           a=True,
+                                           stdin=subprocess2.DEVNULL,
+                                           stdout=subprocess2.PIPE)
+
+    @mock.patch('subprocess.Popen.__init__')
+    def test_env_type(self, mockPopen):
+        subprocess2.Popen(['foo'], env={b'key': b'value'})
+        mockPopen.assert_called_with(['foo'],
+                                     env={'key': 'value'},
+                                     shell=mock.ANY)
 
 
 def _run_test(with_subprocess=True):
-  """Runs a tests in 12 combinations:
+    """Runs a tests in 12 combinations:
   - With universal_newlines=True and False.
   - With LF, CR, and CRLF output.
   - With subprocess and subprocess2.
   """
-  subps = (subprocess2, subprocess) if with_subprocess else (subprocess2,)
-  no_op = lambda s: s
-  to_bytes = lambda s: s.encode()
-  to_cr_bytes = lambda s: s.replace('\n', '\r').encode()
-  to_crlf_bytes = lambda s: s.replace('\n', '\r\n').encode()
-  def wrapper(test):
-    def inner(self):
-      for subp in subps:
-        # universal_newlines = False
-        test(self, to_bytes, TEST_COMMAND, False, subp)
-        test(self, to_cr_bytes, TEST_COMMAND + ['--cr'], False, subp)
-        test(self, to_crlf_bytes, TEST_COMMAND + ['--crlf'], False, subp)
-        # universal_newlines = True
-        test(self, no_op, TEST_COMMAND, True, subp)
-        test(self, no_op, TEST_COMMAND + ['--cr'], True, subp)
-        test(self, no_op, TEST_COMMAND + ['--crlf'], True, subp)
-
-    return inner
-  return wrapper
+    subps = (subprocess2, subprocess) if with_subprocess else (subprocess2, )
+    no_op = lambda s: s
+    to_bytes = lambda s: s.encode()
+    to_cr_bytes = lambda s: s.replace('\n', '\r').encode()
+    to_crlf_bytes = lambda s: s.replace('\n', '\r\n').encode()
+
+    def wrapper(test):
+        def inner(self):
+            for subp in subps:
+                # universal_newlines = False
+                test(self, to_bytes, TEST_COMMAND, False, subp)
+                test(self, to_cr_bytes, TEST_COMMAND + ['--cr'], False, subp)
+                test(self, to_crlf_bytes, TEST_COMMAND + ['--crlf'], False,
+                     subp)
+                # universal_newlines = True
+                test(self, no_op, TEST_COMMAND, True, subp)
+                test(self, no_op, TEST_COMMAND + ['--cr'], True, subp)
+                test(self, no_op, TEST_COMMAND + ['--crlf'], True, subp)
+
+        return inner
+
+    return wrapper
 
 
 class SmokeTests(unittest.TestCase):
-  # Regression tests to ensure that subprocess and subprocess2 have the same
-  # behavior.
-  def _check_res(self, res, stdout, stderr, returncode):
-    (out, err), code = res
-    self.assertEqual(stdout, out)
-    self.assertEqual(stderr, err)
-    self.assertEqual(returncode, code)
-
-  def _check_exception(self, subp, e, stdout, stderr, returncode):
-    """On exception, look if the exception members are set correctly."""
-    self.assertEqual(returncode, e.returncode)
-    self.assertEqual(stdout, e.stdout)
-    self.assertEqual(stderr, e.stderr)
-
-  def test_check_output_no_stdout(self):
-    for subp in (subprocess, subprocess2):
-      with self.assertRaises(ValueError):
-        # pylint: disable=unexpected-keyword-arg
-        subp.check_output(TEST_COMMAND, stdout=subp.PIPE)
-
-  def test_print_exception(self):
-    with self.assertRaises(subprocess2.CalledProcessError) as e:
-      subprocess2.check_output(TEST_COMMAND + ['--fail', '--stdout'])
-    exception_str = str(e.exception)
-    # Windows escapes backslashes so check only filename
-    self.assertIn(TEST_FILENAME + ' --fail --stdout', exception_str)
-    self.assertIn(str(e.exception.returncode), exception_str)
-    self.assertIn(e.exception.stdout.decode('utf-8', 'ignore'), exception_str)
-
-  @_run_test()
-  def test_check_output_throw_stdout(self, c, cmd, un, subp):
-    with self.assertRaises(subp.CalledProcessError) as e:
-      subp.check_output(
-          cmd + ['--fail', '--stdout'], universal_newlines=un)
-    self._check_exception(subp, e.exception, c('A\nBB\nCCC\n'), None, 64)
-
-  @_run_test()
-  def test_check_output_throw_no_stderr(self, c, cmd, un, subp):
-    with self.assertRaises(subp.CalledProcessError) as e:
-      subp.check_output(
-          cmd + ['--fail', '--stderr'], universal_newlines=un)
-    self._check_exception(subp, e.exception, c(''), None, 64)
-
-  @_run_test()
-  def test_check_output_throw_stderr(self, c, cmd, un, subp):
-    with self.assertRaises(subp.CalledProcessError) as e:
-      subp.check_output(
-          cmd + ['--fail', '--stderr'],
-          stderr=subp.PIPE,
-          universal_newlines=un)
-    self._check_exception(subp, e.exception, c(''), c('a\nbb\nccc\n'), 64)
-
-  @_run_test()
-  def test_check_output_throw_stderr_stdout(self, c, cmd, un, subp):
-    with self.assertRaises(subp.CalledProcessError) as e:
-      subp.check_output(
-          cmd + ['--fail', '--stderr'],
-          stderr=subp.STDOUT,
-          universal_newlines=un)
-    self._check_exception(subp, e.exception, c('a\nbb\nccc\n'), None, 64)
-
-  def test_check_call_throw(self):
-    for subp in (subprocess, subprocess2):
-      with self.assertRaises(subp.CalledProcessError) as e:
-        subp.check_call(TEST_COMMAND + ['--fail', '--stderr'])
-      self._check_exception(subp, e.exception, None, None, 64)
-
-  @_run_test()
-  def test_redirect_stderr_to_stdout_pipe(self, c, cmd, un, subp):
-    # stderr output into stdout.
-    proc = subp.Popen(
-        cmd + ['--stderr'],
-        stdout=subp.PIPE,
-        stderr=subp.STDOUT,
-        universal_newlines=un)
-    res = proc.communicate(), proc.returncode
-    self._check_res(res, c('a\nbb\nccc\n'), None, 0)
-
-  @_run_test()
-  def test_redirect_stderr_to_stdout(self, c, cmd, un, subp):
-    # stderr output into stdout but stdout is not piped.
-    proc = subp.Popen(
-        cmd + ['--stderr'], stderr=subprocess2.STDOUT, universal_newlines=un)
-    res = proc.communicate(), proc.returncode
-    self._check_res(res, None, None, 0)
-
-  @_run_test()
-  def test_stderr(self, c, cmd, un, subp):
-    cmd = ['expr', '1', '/', '0']
-    if sys.platform == 'win32':
-      cmd = ['cmd.exe', '/c', 'exit', '1']
-    p1 = subprocess.Popen(cmd, stderr=subprocess.PIPE, shell=False)
-    p2 = subprocess2.Popen(cmd, stderr=subprocess.PIPE, shell=False)
-    r1 = p1.communicate()
-    r2 = p2.communicate()
-    self.assertEqual(r1, r2)
-
-  @_run_test(with_subprocess=False)
-  def test_stdin(self, c, cmd, un, subp):
-    stdin = c('0123456789')
-    res = subprocess2.communicate(
-        cmd + ['--read'],
-        stdin=stdin,
-        universal_newlines=un)
-    self._check_res(res, None, None, 10)
-
-  @_run_test(with_subprocess=False)
-  def test_stdin_empty(self, c, cmd, un, subp):
-    stdin = c('')
-    res = subprocess2.communicate(
-        cmd + ['--read'],
-        stdin=stdin,
-        universal_newlines=un)
-    self._check_res(res, None, None, 0)
-
-  def test_stdin_void(self):
-    res = subprocess2.communicate(
-        TEST_COMMAND + ['--read'],
-        stdin=subprocess2.DEVNULL)
-    self._check_res(res, None, None, 0)
-
-  @_run_test(with_subprocess=False)
-  def test_stdin_void_stdout(self, c, cmd, un, subp):
-    # Make sure a mix ofsubprocess2.DEVNULL andsubprocess2.PIPE works.
-    res = subprocess2.communicate(
-        cmd + ['--stdout', '--read'],
-        stdin=subprocess2.DEVNULL,
-        stdout=subprocess2.PIPE,
-        universal_newlines=un,
-        shell=False)
-    self._check_res(res, c('A\nBB\nCCC\n'), None, 0)
-
-  @_run_test(with_subprocess=False)
-  def test_stdout_void(self, c, cmd, un, subp):
-    res = subprocess2.communicate(
-        cmd + ['--stdout', '--stderr'],
-        stdout=subprocess2.DEVNULL,
-        stderr=subprocess2.PIPE,
-        universal_newlines=un)
-    self._check_res(res, None, c('a\nbb\nccc\n'), 0)
-
-  @_run_test(with_subprocess=False)
-  def test_stderr_void(self, c, cmd, un, subp):
-    res = subprocess2.communicate(
-        cmd + ['--stdout', '--stderr'],
-        stdout=subprocess2.PIPE,
-        stderr=subprocess2.DEVNULL,
-        universal_newlines=un)
-    self._check_res(res, c('A\nBB\nCCC\n'), None, 0)
-
-  @_run_test(with_subprocess=False)
-  def test_stdout_void_stderr_redirect(self, c, cmd, un, subp):
-    res = subprocess2.communicate(
-        cmd + ['--stdout', '--stderr'],
-        stdout=subprocess2.DEVNULL,
-        stderr=subprocess2.STDOUT,
-        universal_newlines=un)
-    self._check_res(res, None, None, 0)
+    # Regression tests to ensure that subprocess and subprocess2 have the same
+    # behavior.
+    def _check_res(self, res, stdout, stderr, returncode):
+        (out, err), code = res
+        self.assertEqual(stdout, out)
+        self.assertEqual(stderr, err)
+        self.assertEqual(returncode, code)
+
+    def _check_exception(self, subp, e, stdout, stderr, returncode):
+        """On exception, look if the exception members are set correctly."""
+        self.assertEqual(returncode, e.returncode)
+        self.assertEqual(stdout, e.stdout)
+        self.assertEqual(stderr, e.stderr)
+
+    def test_check_output_no_stdout(self):
+        for subp in (subprocess, subprocess2):
+            with self.assertRaises(ValueError):
+                # pylint: disable=unexpected-keyword-arg
+                subp.check_output(TEST_COMMAND, stdout=subp.PIPE)
+
+    def test_print_exception(self):
+        with self.assertRaises(subprocess2.CalledProcessError) as e:
+            subprocess2.check_output(TEST_COMMAND + ['--fail', '--stdout'])
+        exception_str = str(e.exception)
+        # Windows escapes backslashes so check only filename
+        self.assertIn(TEST_FILENAME + ' --fail --stdout', exception_str)
+        self.assertIn(str(e.exception.returncode), exception_str)
+        self.assertIn(e.exception.stdout.decode('utf-8', 'ignore'),
+                      exception_str)
+
+    @_run_test()
+    def test_check_output_throw_stdout(self, c, cmd, un, subp):
+        with self.assertRaises(subp.CalledProcessError) as e:
+            subp.check_output(cmd + ['--fail', '--stdout'],
+                              universal_newlines=un)
+        self._check_exception(subp, e.exception, c('A\nBB\nCCC\n'), None, 64)
+
+    @_run_test()
+    def test_check_output_throw_no_stderr(self, c, cmd, un, subp):
+        with self.assertRaises(subp.CalledProcessError) as e:
+            subp.check_output(cmd + ['--fail', '--stderr'],
+                              universal_newlines=un)
+        self._check_exception(subp, e.exception, c(''), None, 64)
+
+    @_run_test()
+    def test_check_output_throw_stderr(self, c, cmd, un, subp):
+        with self.assertRaises(subp.CalledProcessError) as e:
+            subp.check_output(cmd + ['--fail', '--stderr'],
+                              stderr=subp.PIPE,
+                              universal_newlines=un)
+        self._check_exception(subp, e.exception, c(''), c('a\nbb\nccc\n'), 64)
+
+    @_run_test()
+    def test_check_output_throw_stderr_stdout(self, c, cmd, un, subp):
+        with self.assertRaises(subp.CalledProcessError) as e:
+            subp.check_output(cmd + ['--fail', '--stderr'],
+                              stderr=subp.STDOUT,
+                              universal_newlines=un)
+        self._check_exception(subp, e.exception, c('a\nbb\nccc\n'), None, 64)
+
+    def test_check_call_throw(self):
+        for subp in (subprocess, subprocess2):
+            with self.assertRaises(subp.CalledProcessError) as e:
+                subp.check_call(TEST_COMMAND + ['--fail', '--stderr'])
+            self._check_exception(subp, e.exception, None, None, 64)
+
+    @_run_test()
+    def test_redirect_stderr_to_stdout_pipe(self, c, cmd, un, subp):
+        # stderr output into stdout.
+        proc = subp.Popen(cmd + ['--stderr'],
+                          stdout=subp.PIPE,
+                          stderr=subp.STDOUT,
+                          universal_newlines=un)
+        res = proc.communicate(), proc.returncode
+        self._check_res(res, c('a\nbb\nccc\n'), None, 0)
+
+    @_run_test()
+    def test_redirect_stderr_to_stdout(self, c, cmd, un, subp):
+        # stderr output into stdout but stdout is not piped.
+        proc = subp.Popen(cmd + ['--stderr'],
+                          stderr=subprocess2.STDOUT,
+                          universal_newlines=un)
+        res = proc.communicate(), proc.returncode
+        self._check_res(res, None, None, 0)
+
+    @_run_test()
+    def test_stderr(self, c, cmd, un, subp):
+        cmd = ['expr', '1', '/', '0']
+        if sys.platform == 'win32':
+            cmd = ['cmd.exe', '/c', 'exit', '1']
+        p1 = subprocess.Popen(cmd, stderr=subprocess.PIPE, shell=False)
+        p2 = subprocess2.Popen(cmd, stderr=subprocess.PIPE, shell=False)
+        r1 = p1.communicate()
+        r2 = p2.communicate()
+        self.assertEqual(r1, r2)
+
+    @_run_test(with_subprocess=False)
+    def test_stdin(self, c, cmd, un, subp):
+        stdin = c('0123456789')
+        res = subprocess2.communicate(cmd + ['--read'],
+                                      stdin=stdin,
+                                      universal_newlines=un)
+        self._check_res(res, None, None, 10)
+
+    @_run_test(with_subprocess=False)
+    def test_stdin_empty(self, c, cmd, un, subp):
+        stdin = c('')
+        res = subprocess2.communicate(cmd + ['--read'],
+                                      stdin=stdin,
+                                      universal_newlines=un)
+        self._check_res(res, None, None, 0)
+
+    def test_stdin_void(self):
+        res = subprocess2.communicate(TEST_COMMAND + ['--read'],
+                                      stdin=subprocess2.DEVNULL)
+        self._check_res(res, None, None, 0)
+
+    @_run_test(with_subprocess=False)
+    def test_stdin_void_stdout(self, c, cmd, un, subp):
+        # Make sure a mix of subprocess2.DEVNULL and subprocess2.PIPE works.
+        res = subprocess2.communicate(cmd + ['--stdout', '--read'],
+                                      stdin=subprocess2.DEVNULL,
+                                      stdout=subprocess2.PIPE,
+                                      universal_newlines=un,
+                                      shell=False)
+        self._check_res(res, c('A\nBB\nCCC\n'), None, 0)
+
+    @_run_test(with_subprocess=False)
+    def test_stdout_void(self, c, cmd, un, subp):
+        res = subprocess2.communicate(cmd + ['--stdout', '--stderr'],
+                                      stdout=subprocess2.DEVNULL,
+                                      stderr=subprocess2.PIPE,
+                                      universal_newlines=un)
+        self._check_res(res, None, c('a\nbb\nccc\n'), 0)
+
+    @_run_test(with_subprocess=False)
+    def test_stderr_void(self, c, cmd, un, subp):
+        res = subprocess2.communicate(cmd + ['--stdout', '--stderr'],
+                                      stdout=subprocess2.PIPE,
+                                      stderr=subprocess2.DEVNULL,
+                                      universal_newlines=un)
+        self._check_res(res, c('A\nBB\nCCC\n'), None, 0)
+
+    @_run_test(with_subprocess=False)
+    def test_stdout_void_stderr_redirect(self, c, cmd, un, subp):
+        res = subprocess2.communicate(cmd + ['--stdout', '--stderr'],
+                                      stdout=subprocess2.DEVNULL,
+                                      stderr=subprocess2.STDOUT,
+                                      universal_newlines=un)
+        self._check_res(res, None, None, 0)
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 151 - 162
tests/upload_to_google_storage_unittest.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2012 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for upload_to_google_storage.py."""
 
 import optparse
@@ -25,175 +24,165 @@ from download_from_google_storage_unittest import ChangedWorkingDirectory
 from third_party import six
 
 if six.PY2:
-  from cStringIO import StringIO
+    from cStringIO import StringIO
 else:
-  from io import StringIO
-
+    from io import StringIO
 
 # ../third_party/gsutil/gsutil
 GSUTIL_DEFAULT_PATH = os.path.join(
-    os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
-    'third_party', 'gsutil', 'gsutil')
+    os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'third_party',
+    'gsutil', 'gsutil')
 TEST_DIR = os.path.dirname(os.path.abspath(__file__))
 
 
 class UploadTests(unittest.TestCase):
-  def setUp(self):
-    self.gsutil = GsutilMock(GSUTIL_DEFAULT_PATH, None)
-    self.temp_dir = tempfile.mkdtemp(prefix='gstools_test')
-    self.base_path = os.path.join(self.temp_dir, 'gstools')
-    shutil.copytree(os.path.join(TEST_DIR, 'gstools'), self.base_path)
-    self.base_url = 'gs://sometesturl'
-    self.parser = optparse.OptionParser()
-    self.ret_codes = queue.Queue()
-    self.stdout_queue = queue.Queue()
-    self.lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
-    self.lorem_ipsum_sha1 = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
-
-  def tearDown(self):
-    shutil.rmtree(self.temp_dir)
-    sys.stdin = sys.__stdin__
-
-  def test_upload_single_file(self):
-    filenames = [self.lorem_ipsum]
-    output_filename = '%s.sha1'  % self.lorem_ipsum
-    code = upload_to_google_storage.upload_to_google_storage(
-        filenames, self.base_url, self.gsutil, True, False, 1, False, 'txt')
-    self.assertEqual(
-        self.gsutil.history,
-        [('check_call',
-          ('ls', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1))),
-         ('check_call',
-          ('-h', 'Cache-Control:public, max-age=31536000', 'cp', '-z', 'txt',
-           filenames[0], '%s/%s' % (self.base_url, self.lorem_ipsum_sha1)))])
-    self.assertTrue(os.path.exists(output_filename))
-    self.assertEqual(
-        open(output_filename, 'rb').read().decode(),
-        '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
-    os.remove(output_filename)
-    self.assertEqual(code, 0)
-
-  def test_create_archive(self):
-    work_dir = os.path.join(self.base_path, 'download_test_data')
-    with ChangedWorkingDirectory(work_dir):
-      dirname = 'subfolder'
-      dirs = [dirname]
-      tar_gz_file = '%s.tar.gz' % dirname
-      self.assertTrue(upload_to_google_storage.validate_archive_dirs(dirs))
-      upload_to_google_storage.create_archives(dirs)
-      self.assertTrue(os.path.exists(tar_gz_file))
-      with tarfile.open(tar_gz_file, 'r:gz') as tar:
-        content = map(lambda x: x.name, tar.getmembers())
-        self.assertIn(dirname, content)
-        self.assertIn(posixpath.join(dirname, 'subfolder_text.txt'), content)
-        self.assertIn(
-            posixpath.join(dirname, 'subfolder_text.txt.sha1'), content)
-
-  @unittest.skipIf(sys.platform == 'win32', 'os.symlink does not exist on win')
-  def test_validate_archive_dirs_fails(self):
-    work_dir = os.path.join(self.base_path, 'download_test_data')
-    with ChangedWorkingDirectory(work_dir):
-      symlink = 'link'
-      os.symlink(os.path.join(self.base_path, 'subfolder'), symlink)
-    self.assertFalse(upload_to_google_storage.validate_archive_dirs([symlink]))
-    self.assertFalse(upload_to_google_storage.validate_archive_dirs(['foobar']))
-
-  def test_upload_single_file_remote_exists(self):
-    filenames = [self.lorem_ipsum]
-    output_filename = '%s.sha1'  % self.lorem_ipsum
-    etag_string = b'ETag: 634d7c1ed3545383837428f031840a1e'
-    self.gsutil.add_expected(0, b'', b'')
-    self.gsutil.add_expected(0, etag_string, b'')
-    code = upload_to_google_storage.upload_to_google_storage(
-        filenames, self.base_url, self.gsutil, False, False, 1, False, None)
-    self.assertEqual(
-        self.gsutil.history,
-        [('check_call',
-          ('ls', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1))),
-         ('check_call',
-          ('ls', '-L', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1)))])
-    self.assertTrue(os.path.exists(output_filename))
-    self.assertEqual(
-        open(output_filename, 'rb').read().decode(),
-        '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
-    os.remove(output_filename)
-    self.assertEqual(code, 0)
-
-  def test_upload_worker_errors(self):
-    work_queue = queue.Queue()
-    work_queue.put((self.lorem_ipsum, self.lorem_ipsum_sha1))
-    work_queue.put((None, None))
-    self.gsutil.add_expected(1, '', '')  # For the first ls call.
-    self.gsutil.add_expected(20, '', 'Expected error message')
-    # pylint: disable=protected-access
-    upload_to_google_storage._upload_worker(
-        0,
-        work_queue,
-        self.base_url,
-        self.gsutil,
-        threading.Lock(),
-        False,
-        False,
-        self.stdout_queue,
-        self.ret_codes,
-        None)
-    expected_ret_codes = [
-      (20,
-       'Encountered error on uploading %s to %s/%s\nExpected error message' %
-          (self.lorem_ipsum, self.base_url, self.lorem_ipsum_sha1))]
-    self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)
-
-  def test_skip_hashing(self):
-    filenames = [self.lorem_ipsum]
-    output_filename = '%s.sha1' % self.lorem_ipsum
-    fake_hash = '6871c8e24da15bad8b0be2c36edc9dc77e37727f'
-    with open(output_filename, 'wb') as f:
-      f.write(fake_hash.encode())  # Fake hash.
-    code = upload_to_google_storage.upload_to_google_storage(
-        filenames, self.base_url, self.gsutil, False, False, 1, True, None)
-    self.assertEqual(
-        self.gsutil.history,
-        [('check_call', ('ls', '%s/%s' % (self.base_url, fake_hash))),
-         ('check_call', ('ls', '-L', '%s/%s' % (self.base_url, fake_hash))),
-         ('check_call', ('-h', 'Cache-Control:public, max-age=31536000', 'cp',
-                         filenames[0], '%s/%s' % (self.base_url, fake_hash)))])
-    self.assertEqual(
-        open(output_filename, 'rb').read().decode(), fake_hash)
-    os.remove(output_filename)
-    self.assertEqual(code, 0)
-
-  def test_get_targets_no_args(self):
-    try:
-      upload_to_google_storage.get_targets([], self.parser, False)
-      self.fail()
-    except SystemExit as e:
-      self.assertEqual(e.code, 2)
-
-  def test_get_targets_passthrough(self):
-    result = upload_to_google_storage.get_targets(
-        ['a', 'b', 'c', 'd', 'e'],
-        self.parser,
-        False)
-    self.assertEqual(result, ['a', 'b', 'c', 'd', 'e'])
-
-  def test_get_targets_multiple_stdin(self):
-    inputs = ['a', 'b', 'c', 'd', 'e']
-    sys.stdin = StringIO(os.linesep.join(inputs))
-    result = upload_to_google_storage.get_targets(
-        ['-'],
-        self.parser,
-        False)
-    self.assertEqual(result, inputs)
-
-  def test_get_targets_multiple_stdin_null(self):
-    inputs = ['a', 'b', 'c', 'd', 'e']
-    sys.stdin = StringIO('\0'.join(inputs))
-    result = upload_to_google_storage.get_targets(
-        ['-'],
-        self.parser,
-        True)
-    self.assertEqual(result, inputs)
+    def setUp(self):
+        self.gsutil = GsutilMock(GSUTIL_DEFAULT_PATH, None)
+        self.temp_dir = tempfile.mkdtemp(prefix='gstools_test')
+        self.base_path = os.path.join(self.temp_dir, 'gstools')
+        shutil.copytree(os.path.join(TEST_DIR, 'gstools'), self.base_path)
+        self.base_url = 'gs://sometesturl'
+        self.parser = optparse.OptionParser()
+        self.ret_codes = queue.Queue()
+        self.stdout_queue = queue.Queue()
+        self.lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
+        self.lorem_ipsum_sha1 = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'
+
+    def tearDown(self):
+        shutil.rmtree(self.temp_dir)
+        sys.stdin = sys.__stdin__
+
+    def test_upload_single_file(self):
+        filenames = [self.lorem_ipsum]
+        output_filename = '%s.sha1' % self.lorem_ipsum
+        code = upload_to_google_storage.upload_to_google_storage(
+            filenames, self.base_url, self.gsutil, True, False, 1, False, 'txt')
+        self.assertEqual(self.gsutil.history, [
+            ('check_call',
+             ('ls', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1))),
+            ('check_call',
+             ('-h', 'Cache-Control:public, max-age=31536000', 'cp', '-z', 'txt',
+              filenames[0], '%s/%s' % (self.base_url, self.lorem_ipsum_sha1)))
+        ])
+        self.assertTrue(os.path.exists(output_filename))
+        self.assertEqual(
+            open(output_filename, 'rb').read().decode(),
+            '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
+        os.remove(output_filename)
+        self.assertEqual(code, 0)
+
+    def test_create_archive(self):
+        work_dir = os.path.join(self.base_path, 'download_test_data')
+        with ChangedWorkingDirectory(work_dir):
+            dirname = 'subfolder'
+            dirs = [dirname]
+            tar_gz_file = '%s.tar.gz' % dirname
+            self.assertTrue(
+                upload_to_google_storage.validate_archive_dirs(dirs))
+            upload_to_google_storage.create_archives(dirs)
+            self.assertTrue(os.path.exists(tar_gz_file))
+            with tarfile.open(tar_gz_file, 'r:gz') as tar:
+                content = map(lambda x: x.name, tar.getmembers())
+                self.assertIn(dirname, content)
+                self.assertIn(posixpath.join(dirname, 'subfolder_text.txt'),
+                              content)
+                self.assertIn(
+                    posixpath.join(dirname, 'subfolder_text.txt.sha1'), content)
+
+    @unittest.skipIf(sys.platform == 'win32',
+                     'os.symlink does not exist on win')
+    def test_validate_archive_dirs_fails(self):
+        work_dir = os.path.join(self.base_path, 'download_test_data')
+        with ChangedWorkingDirectory(work_dir):
+            symlink = 'link'
+            os.symlink(os.path.join(self.base_path, 'subfolder'), symlink)
+        self.assertFalse(
+            upload_to_google_storage.validate_archive_dirs([symlink]))
+        self.assertFalse(
+            upload_to_google_storage.validate_archive_dirs(['foobar']))
+
+    def test_upload_single_file_remote_exists(self):
+        filenames = [self.lorem_ipsum]
+        output_filename = '%s.sha1' % self.lorem_ipsum
+        etag_string = b'ETag: 634d7c1ed3545383837428f031840a1e'
+        self.gsutil.add_expected(0, b'', b'')
+        self.gsutil.add_expected(0, etag_string, b'')
+        code = upload_to_google_storage.upload_to_google_storage(
+            filenames, self.base_url, self.gsutil, False, False, 1, False, None)
+        self.assertEqual(
+            self.gsutil.history,
+            [('check_call',
+              ('ls', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1))),
+             ('check_call',
+              ('ls', '-L', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1)))])
+        self.assertTrue(os.path.exists(output_filename))
+        self.assertEqual(
+            open(output_filename, 'rb').read().decode(),
+            '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
+        os.remove(output_filename)
+        self.assertEqual(code, 0)
+
+    def test_upload_worker_errors(self):
+        work_queue = queue.Queue()
+        work_queue.put((self.lorem_ipsum, self.lorem_ipsum_sha1))
+        work_queue.put((None, None))
+        self.gsutil.add_expected(1, '', '')  # For the first ls call.
+        self.gsutil.add_expected(20, '', 'Expected error message')
+        # pylint: disable=protected-access
+        upload_to_google_storage._upload_worker(0, work_queue,
+                                                self.base_url, self.gsutil,
+                                                threading.Lock(), False, False,
+                                                self.stdout_queue,
+                                                self.ret_codes, None)
+        expected_ret_codes = [(
+            20,
+            'Encountered error on uploading %s to %s/%s\nExpected error message'
+            % (self.lorem_ipsum, self.base_url, self.lorem_ipsum_sha1))]
+        self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)
+
+    def test_skip_hashing(self):
+        filenames = [self.lorem_ipsum]
+        output_filename = '%s.sha1' % self.lorem_ipsum
+        fake_hash = '6871c8e24da15bad8b0be2c36edc9dc77e37727f'
+        with open(output_filename, 'wb') as f:
+            f.write(fake_hash.encode())  # Fake hash.
+        code = upload_to_google_storage.upload_to_google_storage(
+            filenames, self.base_url, self.gsutil, False, False, 1, True, None)
+        self.assertEqual(
+            self.gsutil.history,
+            [('check_call', ('ls', '%s/%s' % (self.base_url, fake_hash))),
+             ('check_call', ('ls', '-L', '%s/%s' % (self.base_url, fake_hash))),
+             ('check_call',
+              ('-h', 'Cache-Control:public, max-age=31536000', 'cp',
+               filenames[0], '%s/%s' % (self.base_url, fake_hash)))])
+        self.assertEqual(open(output_filename, 'rb').read().decode(), fake_hash)
+        os.remove(output_filename)
+        self.assertEqual(code, 0)
+
+    def test_get_targets_no_args(self):
+        try:
+            upload_to_google_storage.get_targets([], self.parser, False)
+            self.fail()
+        except SystemExit as e:
+            self.assertEqual(e.code, 2)
+
+    def test_get_targets_passthrough(self):
+        result = upload_to_google_storage.get_targets(['a', 'b', 'c', 'd', 'e'],
+                                                      self.parser, False)
+        self.assertEqual(result, ['a', 'b', 'c', 'd', 'e'])
+
+    def test_get_targets_multiple_stdin(self):
+        inputs = ['a', 'b', 'c', 'd', 'e']
+        sys.stdin = StringIO(os.linesep.join(inputs))
+        result = upload_to_google_storage.get_targets(['-'], self.parser, False)
+        self.assertEqual(result, inputs)
+
+    def test_get_targets_multiple_stdin_null(self):
+        inputs = ['a', 'b', 'c', 'd', 'e']
+        sys.stdin = StringIO('\0'.join(inputs))
+        result = upload_to_google_storage.get_targets(['-'], self.parser, True)
+        self.assertEqual(result, inputs)
 
 
 if __name__ == '__main__':
-  unittest.main()
+    unittest.main()

+ 28 - 28
tests/utils_test.py

@@ -17,34 +17,34 @@ import utils
 
 
 class GitCacheTest(unittest.TestCase):
-  def setUp(self):
-    pass
-
-  @mock.patch('subprocess.check_output', lambda x, **kwargs: b'foo')
-  def testVersionWithGit(self):
-    version = utils.depot_tools_version()
-    self.assertEqual(version, 'git-foo')
-
-  @mock.patch('subprocess.check_output')
-  @mock.patch('os.path.getmtime', lambda x: 42)
-  def testVersionWithNoGit(self, mock_subprocess):
-    mock_subprocess.side_effect = Exception
-    version = utils.depot_tools_version()
-    self.assertEqual(version, 'recipes.cfg-42')
-
-  @mock.patch('subprocess.check_output')
-  @mock.patch('os.path.getmtime')
-  def testVersionWithNoGit(self, mock_subprocess, mock_getmtime):
-    mock_subprocess.side_effect = Exception
-    mock_getmtime.side_effect = Exception
-    version = utils.depot_tools_version()
-    self.assertEqual(version, 'unknown')
+    def setUp(self):
+        pass
+
+    @mock.patch('subprocess.check_output', lambda x, **kwargs: b'foo')
+    def testVersionWithGit(self):
+        version = utils.depot_tools_version()
+        self.assertEqual(version, 'git-foo')
+
+    @mock.patch('subprocess.check_output')
+    @mock.patch('os.path.getmtime', lambda x: 42)
+    def testVersionWithNoGit(self, mock_subprocess):
+        mock_subprocess.side_effect = Exception
+        version = utils.depot_tools_version()
+        self.assertEqual(version, 'recipes.cfg-42')
+
+    @mock.patch('subprocess.check_output')
+    @mock.patch('os.path.getmtime')
+    def testVersionWithNoGit(self, mock_subprocess, mock_getmtime):
+        mock_subprocess.side_effect = Exception
+        mock_getmtime.side_effect = Exception
+        version = utils.depot_tools_version()
+        self.assertEqual(version, 'unknown')
 
 
 if __name__ == '__main__':
-  logging.basicConfig(
-      level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
-  sys.exit(
-      coverage_utils.covered_main(
-          (os.path.join(DEPOT_TOOLS_ROOT, 'git_cache.py')),
-          required_percentage=0))
+    logging.basicConfig(
+        level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
+    sys.exit(
+        coverage_utils.covered_main(
+            (os.path.join(DEPOT_TOOLS_ROOT, 'git_cache.py')),
+            required_percentage=0))

+ 74 - 76
tests/watchlists_unittest.py

@@ -2,7 +2,6 @@
 # Copyright (c) 2011 The Chromium Authors. All rights reserved.
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-
 """Unit tests for watchlists.py."""
 
 # pylint: disable=E1103,no-value-for-parameter,protected-access
@@ -18,33 +17,32 @@ import watchlists
 
 
 class WatchlistsTest(unittest.TestCase):
-
-  def setUp(self):
-    super(WatchlistsTest, self).setUp()
-    mock.patch('watchlists.Watchlists._HasWatchlistsFile').start()
-    mock.patch('watchlists.Watchlists._ContentsOfWatchlistsFile').start()
-    mock.patch('watchlists.logging.error').start()
-    self.addCleanup(mock.patch.stopall)
-
-  def testMissingWatchlistsFileOK(self):
-    """Test that we act gracefully if WATCHLISTS file is missing."""
-    watchlists.Watchlists._HasWatchlistsFile.return_value = False
-
-    wl = watchlists.Watchlists('/some/random/path')
-    self.assertEqual(wl.GetWatchersForPaths(['some_path']), [])
-
-  def testGarbledWatchlistsFileOK(self):
-    """Test that we act gracefully if WATCHLISTS file is garbled."""
-    contents = 'some garbled and unwanted text'
-    watchlists.Watchlists._HasWatchlistsFile.return_value = True
-    watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
-
-    wl = watchlists.Watchlists('/a/path')
-    self.assertEqual(wl.GetWatchersForPaths(['some_path']), [])
-
-  def testNoWatchers(self):
-    contents = \
-      """{
+    def setUp(self):
+        super(WatchlistsTest, self).setUp()
+        mock.patch('watchlists.Watchlists._HasWatchlistsFile').start()
+        mock.patch('watchlists.Watchlists._ContentsOfWatchlistsFile').start()
+        mock.patch('watchlists.logging.error').start()
+        self.addCleanup(mock.patch.stopall)
+
+    def testMissingWatchlistsFileOK(self):
+        """Test that we act gracefully if WATCHLISTS file is missing."""
+        watchlists.Watchlists._HasWatchlistsFile.return_value = False
+
+        wl = watchlists.Watchlists('/some/random/path')
+        self.assertEqual(wl.GetWatchersForPaths(['some_path']), [])
+
+    def testGarbledWatchlistsFileOK(self):
+        """Test that we act gracefully if WATCHLISTS file is garbled."""
+        contents = 'some garbled and unwanted text'
+        watchlists.Watchlists._HasWatchlistsFile.return_value = True
+        watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
+
+        wl = watchlists.Watchlists('/a/path')
+        self.assertEqual(wl.GetWatchersForPaths(['some_path']), [])
+
+    def testNoWatchers(self):
+        contents = \
+          """{
         'WATCHLIST_DEFINITIONS': {
           'a_module': {
             'filepath': 'a_module',
@@ -55,16 +53,16 @@ class WatchlistsTest(unittest.TestCase):
           'a_module': [],
         },
       } """
-    watchlists.Watchlists._HasWatchlistsFile.return_value = True
-    watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
+        watchlists.Watchlists._HasWatchlistsFile.return_value = True
+        watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
 
-    wl = watchlists.Watchlists('/a/path')
-    self.assertEqual(wl.GetWatchersForPaths(['a_module']), [])
+        wl = watchlists.Watchlists('/a/path')
+        self.assertEqual(wl.GetWatchersForPaths(['a_module']), [])
 
-  def testValidWatcher(self):
-    watchers = ['abc@def.com', 'x1@xyz.org']
-    contents = \
-      """{
+    def testValidWatcher(self):
+        watchers = ['abc@def.com', 'x1@xyz.org']
+        contents = \
+          """{
         'WATCHLIST_DEFINITIONS': {
           'a_module': {
             'filepath': 'a_module',
@@ -74,16 +72,16 @@ class WatchlistsTest(unittest.TestCase):
           'a_module': %s,
         },
       } """ % watchers
-    watchlists.Watchlists._HasWatchlistsFile.return_value = True
-    watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
+        watchlists.Watchlists._HasWatchlistsFile.return_value = True
+        watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
 
-    wl = watchlists.Watchlists('/a/path')
-    self.assertEqual(wl.GetWatchersForPaths(['a_module']), watchers)
+        wl = watchlists.Watchlists('/a/path')
+        self.assertEqual(wl.GetWatchersForPaths(['a_module']), watchers)
 
-  def testMultipleWatchlistsTrigger(self):
-    """Test that multiple watchlists can get triggered for one filepath."""
-    contents = \
-      """{
+    def testMultipleWatchlistsTrigger(self):
+        """Test that multiple watchlists can get triggered for one filepath."""
+        contents = \
+          """{
         'WATCHLIST_DEFINITIONS': {
           'mac': {
             'filepath': 'mac',
@@ -97,18 +95,18 @@ class WatchlistsTest(unittest.TestCase):
           'views': ['x2@chromium.org'],
         },
       } """
-    watchlists.Watchlists._HasWatchlistsFile.return_value = True
-    watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
-
-    wl = watchlists.Watchlists('/a/path')
-    self.assertEqual(wl.GetWatchersForPaths(['file_views_mac']),
-        ['x1@chromium.org', 'x2@chromium.org'])
-
-  def testDuplicateWatchers(self):
-    """Test that multiple watchlists can get triggered for one filepath."""
-    watchers = ['someone@chromium.org']
-    contents = \
-      """{
+        watchlists.Watchlists._HasWatchlistsFile.return_value = True
+        watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
+
+        wl = watchlists.Watchlists('/a/path')
+        self.assertEqual(wl.GetWatchersForPaths(['file_views_mac']),
+                         ['x1@chromium.org', 'x2@chromium.org'])
+
+    def testDuplicateWatchers(self):
+        """Test that multiple watchlists can get triggered for one filepath."""
+        watchers = ['someone@chromium.org']
+        contents = \
+          """{
         'WATCHLIST_DEFINITIONS': {
           'mac': {
             'filepath': 'mac',
@@ -122,17 +120,17 @@ class WatchlistsTest(unittest.TestCase):
           'views': %s,
         },
       } """ % (watchers, watchers)
-    watchlists.Watchlists._HasWatchlistsFile.return_value = True
-    watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
+        watchlists.Watchlists._HasWatchlistsFile.return_value = True
+        watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
 
-    wl = watchlists.Watchlists('/a/path')
-    self.assertEqual(wl.GetWatchersForPaths(['file_views_mac']), watchers)
+        wl = watchlists.Watchlists('/a/path')
+        self.assertEqual(wl.GetWatchersForPaths(['file_views_mac']), watchers)
 
-  def testWinPathWatchers(self):
-    """Test watchers for a windows path (containing backward slashes)."""
-    watchers = ['abc@def.com', 'x1@xyz.org']
-    contents = \
-      """{
+    def testWinPathWatchers(self):
+        """Test watchers for a windows path (containing backward slashes)."""
+        watchers = ['abc@def.com', 'x1@xyz.org']
+        contents = \
+          """{
         'WATCHLIST_DEFINITIONS': {
           'browser': {
             'filepath': 'chrome/browser/.*',
@@ -142,18 +140,18 @@ class WatchlistsTest(unittest.TestCase):
           'browser': %s,
         },
       } """ % watchers
-    saved_sep = watchlists.os.sep
-    watchlists.os.sep = '\\'  # to pose as win32
-    watchlists.Watchlists._HasWatchlistsFile.return_value = True
-    watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
+        saved_sep = watchlists.os.sep
+        watchlists.os.sep = '\\'  # to pose as win32
+        watchlists.Watchlists._HasWatchlistsFile.return_value = True
+        watchlists.Watchlists._ContentsOfWatchlistsFile.return_value = contents
 
-    wl = watchlists.Watchlists(r'a\path')
-    returned_watchers = wl.GetWatchersForPaths(
-          [r'chrome\browser\renderer_host\render_widget_host.h'])
-    watchlists.os.sep = saved_sep  # revert back os.sep before asserts
-    self.assertEqual(returned_watchers, watchers)
+        wl = watchlists.Watchlists(r'a\path')
+        returned_watchers = wl.GetWatchersForPaths(
+            [r'chrome\browser\renderer_host\render_widget_host.h'])
+        watchlists.os.sep = saved_sep  # revert back os.sep before asserts
+        self.assertEqual(returned_watchers, watchers)
 
 
 if __name__ == '__main__':
-  import unittest
-  unittest.main()
+    import unittest
+    unittest.main()

Niektóre pliki nie zostały wyświetlone z powodu dużej ilości zmienionych plików