View source

Convert print statements to Python 3 style

Ran "2to3 -w -n -f print ./" and manually added imports.
Ran "^\s*print " and "\s+print " to find batch/shell scripts, comments and the like with embedded code, and updated them manually.
Also manually added imports to files, which used print as a function, but were missing the import.

The scripts still work with Python 2.
There are no intended behaviour changes.

Bug: 942522
Change-Id: Id777e4d4df4adcdfdab1b18bde89f235ef491b9f
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/1595684
Reviewed-by: Dirk Pranke <dpranke@chromium.org>
Commit-Queue: Dirk Pranke <dpranke@chromium.org>
Auto-Submit: Raul Tambre <raul@tambre.ee>
Raul Tambre, 6 years ago
parent
commit
80ee78e7fa
60 files changed, 424 additions and 317 deletions
  1. 4 2
      annotated_gclient.py
  2. 2 0
      auth.py
  3. 1 1
      autoninja.bat
  4. 3 1
      autoninja.py
  5. 8 6
      buildbucket.py
  6. 6 4
      checkout.py
  7. 9 7
      cit.py
  8. 4 1
      clang_format.py
  9. 9 7
      clang_format_merge_driver.py
  10. 3 1
      compile_single_file.py
  11. 4 2
      dart_format.py
  12. 10 8
      depot-tools-auth.py
  13. 5 2
      fix_encoding.py
  14. 2 0
      gclient-new-workdir.py
  15. 8 9
      gclient.py
  16. 8 6
      gerrit_util.py
  17. 2 2
      git_cl.py
  18. 3 1
      git_drover.py
  19. 8 6
      git_find_releases.py
  20. 7 5
      git_footers.py
  21. 3 1
      git_freezer.py
  22. 7 5
      git_map_branches.py
  23. 8 6
      git_mark_merge_base.py
  24. 7 5
      git_nav_downstream.py
  25. 3 1
      git_number.py
  26. 38 43
      git_rebase_update.py
  27. 8 6
      git_reparent_branch.py
  28. 4 2
      git_upstream_diff.py
  29. 6 4
      gn.py
  30. 3 1
      man/src/filter_demo_output.py
  31. 13 11
      my_activity.py
  32. 34 29
      my_reviews.py
  33. 4 2
      ninjalog_uploader_wrapper.py
  34. 3 1
      owners_finder.py
  35. 17 15
      post_build_ninja_summary.py
  36. 3 1
      presubmit_canned_checks.py
  37. 4 2
      presubmit_support.py
  38. 3 1
      rietveld.py
  39. 2 0
      roll_dep.py
  40. 14 11
      roll_dep_svn.py
  41. 13 11
      split_cl.py
  42. 6 4
      testing_support/coverage_utils.py
  43. 4 2
      testing_support/fake_repos.py
  44. 5 3
      testing_support/gerrit-init.sh
  45. 3 1
      testing_support/gerrit_test_case.py
  46. 3 1
      testing_support/super_mox.py
  47. 2 1
      testing_support/trial_dir.py
  48. 3 1
      tests/checkout_test.py
  49. 4 2
      tests/download_from_google_storage_unittest.py
  50. 8 6
      tests/fix_encoding_test.py
  51. 1 1
      tests/git_cl_test.py
  52. 6 4
      tests/git_common_test.py
  53. 5 3
      tests/presubmit_unittest.py
  54. 1 1
      third_party/upload.py
  55. 8 6
      upload_to_google_storage.py
  56. 5 3
      watchlists.py
  57. 4 2
      weekly
  58. 38 36
      win_toolchain/get_toolchain_if_necessary.py
  59. 10 8
      win_toolchain/package_from_installed.py
  60. 5 3
      wtf

+ 4 - 2
annotated_gclient.py

@@ -12,6 +12,8 @@ To run `gclient sync --gclientfile=.gclient` and annotate got_v8_revision:
       sync --gclientfile=.gclient`
       sync --gclientfile=.gclient`
 """
 """
 
 
+from __future__ import print_function
+
 import contextlib
 import contextlib
 import json
 import json
 import optparse
 import optparse
@@ -31,7 +33,7 @@ def temp_filename(suffix='', prefix='tmp'):
   try:
   try:
     os.remove(output_file)
     os.remove(output_file)
   except OSError as e:
   except OSError as e:
-    print 'Error cleaning up temp file %s: %s' % (output_file, e)
+    print('Error cleaning up temp file %s: %s' % (output_file, e))
 
 
 
 
 def parse_got_revision(filename, revision_mapping):
 def parse_got_revision(filename, revision_mapping):
@@ -51,7 +53,7 @@ def parse_got_revision(filename, revision_mapping):
 
 
 def emit_buildprops(got_revisions):
 def emit_buildprops(got_revisions):
   for prop, revision in got_revisions.iteritems():
   for prop, revision in got_revisions.iteritems():
-    print '@@@SET_BUILD_PROPERTY@%s@%s@@@' % (prop, json.dumps(revision))
+    print('@@@SET_BUILD_PROPERTY@%s@%s@@@' % (prop, json.dumps(revision)))
 
 
 
 
 def main():
 def main():

+ 2 - 0
auth.py

@@ -4,6 +4,8 @@
 
 
 """Google OAuth2 related functions."""
 """Google OAuth2 related functions."""
 
 
+from __future__ import print_function
+
 import BaseHTTPServer
 import BaseHTTPServer
 import collections
 import collections
 import datetime
 import datetime

+ 1 - 1
autoninja.bat

@@ -6,7 +6,7 @@
 setlocal
 setlocal
 
 
 REM Set unique build ID.
 REM Set unique build ID.
-FOR /f "usebackq tokens=*" %%a in (`python -c "import uuid; print uuid.uuid4()"`) do set AUTONINJA_BUILD_ID=%%a
+FOR /f "usebackq tokens=*" %%a in (`python -c "from __future__ import print_function; import uuid; print(uuid.uuid4())"`) do set AUTONINJA_BUILD_ID=%%a
 
 
 REM If a build performance summary has been requested then also set NINJA_STATUS
 REM If a build performance summary has been requested then also set NINJA_STATUS
 REM to trigger more verbose status updates. In particular this makes it possible
 REM to trigger more verbose status updates. In particular this makes it possible

+ 3 - 1
autoninja.py

@@ -10,6 +10,8 @@ and safer, and avoids errors that can cause slow goma builds or swap-storms
 on non-goma builds.
 on non-goma builds.
 """
 """
 
 
+from __future__ import print_function
+
 import multiprocessing
 import multiprocessing
 import os
 import os
 import re
 import re
@@ -108,4 +110,4 @@ for i in range(len(args)):
   if (i == 0 and sys.platform.startswith('win')) or ' ' in args[i]:
   if (i == 0 and sys.platform.startswith('win')) or ' ' in args[i]:
     args[i] = '"%s"' % args[i].replace('"', '\\"')
     args[i] = '"%s"' % args[i].replace('"', '\\"')
 
 
-print ' '.join(args)
+print(' '.join(args))

+ 8 - 6
buildbucket.py

@@ -15,6 +15,8 @@ Usage:
   Puts a build into buildbucket for my-builder on tryserver.chromium.linux.
   Puts a build into buildbucket for my-builder on tryserver.chromium.linux.
 """
 """
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import json
 import json
 import urlparse
 import urlparse
@@ -153,9 +155,9 @@ def main(argv):
   http.force_exception_to_status_code = True
   http.force_exception_to_status_code = True
 
 
   if args.verbose:
   if args.verbose:
-    print 'Request URL:', url
-    print 'Request method:', method
-    print 'Request body:', body
+    print('Request URL:', url)
+    print('Request method:', method)
+    print('Request body:', body)
 
 
   response, content = http.request(
   response, content = http.request(
     url,
     url,
@@ -165,8 +167,8 @@ def main(argv):
   )
   )
 
 
   if args.verbose:
   if args.verbose:
-    print 'Response:', response
-    print 'Content:', content
+    print('Response:', response)
+    print('Content:', content)
 
 
   try:
   try:
     content_json = json.loads(content)
     content_json = json.loads(content)
@@ -177,7 +179,7 @@ def main(argv):
   except (ValueError, TypeError, KeyError):
   except (ValueError, TypeError, KeyError):
     pass
     pass
   else:
   else:
-    print 'Build: %s' % build_url
+    print('Build: %s' % build_url)
 
 
   return response.status != 200
   return response.status != 200
 
 

+ 6 - 4
checkout.py

@@ -7,6 +7,8 @@
 Includes support only for git.
 Includes support only for git.
 """
 """
 
 
+from __future__ import print_function
+
 import fnmatch
 import fnmatch
 import logging
 import logging
 import os
 import os
@@ -288,8 +290,8 @@ class GitCheckout(CheckoutBase):
         for post in post_processors:
         for post in post_processors:
           post(self, p)
           post(self, p)
         if verbose:
         if verbose:
-          print p.filename
-          print align_stdout(stdout)
+          print(p.filename)
+          print(align_stdout(stdout))
       except OSError, e:
       except OSError, e:
         errors.append((p, '%s%s' % (align_stdout(stdout), e)))
         errors.append((p, '%s%s' % (align_stdout(stdout), e)))
       except subprocess.CalledProcessError, e:
       except subprocess.CalledProcessError, e:
@@ -307,9 +309,9 @@ class GitCheckout(CheckoutBase):
       extra_files = sorted(set(found_files) - set(patches.filenames))
       extra_files = sorted(set(found_files) - set(patches.filenames))
       unpatched_files = sorted(set(patches.filenames) - set(found_files))
       unpatched_files = sorted(set(patches.filenames) - set(found_files))
       if extra_files:
       if extra_files:
-        print 'Found extra files: %r' % (extra_files,)
+        print('Found extra files: %r' % extra_files)
       if unpatched_files:
       if unpatched_files:
-        print 'Found unpatched files: %r' % (unpatched_files,)
+        print('Found unpatched files: %r' % unpatched_files)
 
 
 
 
   def commit(self, commit_message, user):
   def commit(self, commit_message, user):

+ 9 - 7
cit.py

@@ -14,6 +14,8 @@ This tool does a two things:
 
 
 # TODO(hinoka,iannucci): Pre-pack infra tools in cipd package with vpython spec.
 # TODO(hinoka,iannucci): Pre-pack infra tools in cipd package with vpython spec.
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import sys
 import sys
 import os
 import os
@@ -102,20 +104,20 @@ def get_available_tools():
 
 
 def usage():
 def usage():
   infra_tools, cipd_tools = get_available_tools()
   infra_tools, cipd_tools = get_available_tools()
-  print """usage: cit.py <name of tool> [args for tool]
+  print("""usage: cit.py <name of tool> [args for tool]
 
 
   Wrapper for maintaining and calling tools in:
   Wrapper for maintaining and calling tools in:
     "infra.git/run.py infra.tools.*"
     "infra.git/run.py infra.tools.*"
     "infra.git/cipd/*"
     "infra.git/cipd/*"
 
 
-  Available infra tools are:"""
+  Available infra tools are:""")
   for tool in infra_tools:
   for tool in infra_tools:
-    print '  * %s' % tool
+    print('  * %s' % tool)
 
 
-  print """
-  Available cipd tools are:"""
+  print("""
+  Available cipd tools are:""")
   for tool in cipd_tools:
   for tool in cipd_tools:
-    print '  * %s' % tool
+    print('  * %s' % tool)
 
 
 
 
 def run(args):
 def run(args):
@@ -137,7 +139,7 @@ def run(args):
   elif os.path.isfile(cipd_file) and is_exe(cipd_file):
   elif os.path.isfile(cipd_file) and is_exe(cipd_file):
     cmd = [cipd_file]
     cmd = [cipd_file]
   else:
   else:
-    print >>sys.stderr, 'Unknown tool "%s"' % tool_name
+    print('Unknown tool "%s"' % tool_name, file=sys.stderr)
     return usage()
     return usage()
 
 
   # Add the remaining arguments.
   # Add the remaining arguments.

+ 4 - 1
clang_format.py

@@ -9,6 +9,8 @@ clang-format binaries are pulled down from Google Cloud Storage whenever you
 sync Chrome, to platform-specific locations. This script knows how to locate
 sync Chrome, to platform-specific locations. This script knows how to locate
 those tools, assuming the script is invoked from inside a Chromium checkout."""
 those tools, assuming the script is invoked from inside a Chromium checkout."""
 
 
+from __future__ import print_function
+
 import gclient_paths
 import gclient_paths
 import os
 import os
 import subprocess
 import subprocess
@@ -63,7 +65,8 @@ def main(args):
   # redirection can be a little opaque.
   # redirection can be a little opaque.
   help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
   help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
   if any(match in args for match in help_syntax):
   if any(match in args for match in help_syntax):
-    print '\nDepot tools redirects you to the clang-format at:\n    %s\n' % tool
+    print(
+        '\nDepot tools redirects you to the clang-format at:\n    %s\n' % tool)
 
 
   return subprocess.call([tool] + args)
   return subprocess.call([tool] + args)
 
 

+ 9 - 7
clang_format_merge_driver.py

@@ -14,6 +14,8 @@ See https://git-scm.com/docs/gitattributes ("Defining a custom merge
 driver") for more details.
 driver") for more details.
 """
 """
 
 
+from __future__ import print_function
+
 import subprocess
 import subprocess
 import sys
 import sys
 
 
@@ -29,14 +31,14 @@ def main():
   base, current, others, file_name_in_tree = sys.argv[1:5]
   base, current, others, file_name_in_tree = sys.argv[1:5]
 
 
   if file_name_in_tree == '%P':
   if file_name_in_tree == '%P':
-    print >>sys.stderr
-    print >>sys.stderr, 'ERROR: clang-format merge driver needs git 2.5+'
+    print(file=sys.stderr)
+    print('ERROR: clang-format merge driver needs git 2.5+', file=sys.stderr)
     if sys.platform == 'darwin':
     if sys.platform == 'darwin':
-      print >>sys.stderr, 'Upgrade to Xcode 7.2+'
-    print >>sys.stderr
+      print('Upgrade to Xcode 7.2+', file=sys.stderr)
+    print(file=sys.stderr)
     return 1
     return 1
 
 
-  print 'Running clang-format 3-way merge driver on ' + file_name_in_tree
+  print('Running clang-format 3-way merge driver on ' + file_name_in_tree)
 
 
   try:
   try:
     tool = clang_format.FindClangFormatToolInChromiumTree()
     tool = clang_format.FindClangFormatToolInChromiumTree()
@@ -56,8 +58,8 @@ def main():
       with open(fpath, 'wb') as output_file:
       with open(fpath, 'wb') as output_file:
         output_file.write(output)
         output_file.write(output)
   except clang_format.NotFoundError, e:
   except clang_format.NotFoundError, e:
-    print e
-    print 'Failed to find clang-format. Falling-back on standard 3-way merge'
+    print(e)
+    print('Failed to find clang-format. Falling-back on standard 3-way merge')
 
 
   return subprocess.call(['git', 'merge-file', '-Lcurrent', '-Lbase', '-Lother',
   return subprocess.call(['git', 'merge-file', '-Lcurrent', '-Lbase', '-Lother',
                           current, base, others])
                           current, base, others])

+ 3 - 1
compile_single_file.py

@@ -3,6 +3,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 # found in the LICENSE file.
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import os
 import os
 import subprocess
 import subprocess
@@ -47,7 +49,7 @@ def main():
   abs_build_dir = os.path.join(src_dir, options.build_dir)
   abs_build_dir = os.path.join(src_dir, options.build_dir)
   src_relpath = os.path.relpath(options.file_path, abs_build_dir)
   src_relpath = os.path.relpath(options.file_path, abs_build_dir)
 
 
-  print 'Building %s' % options.file_path
+  print('Building %s' % options.file_path)
 
 
   ninja_exec = 'ninja'
   ninja_exec = 'ninja'
   carets = '^'
   carets = '^'

+ 4 - 2
dart_format.py

@@ -10,6 +10,8 @@ dartfmt binaries are pulled down during gclient sync in the mojo repo.
 This tool is named dart_format.py instead of dartfmt to parallel
 This tool is named dart_format.py instead of dartfmt to parallel
 clang_format.py, which is in this same repository."""
 clang_format.py, which is in this same repository."""
 
 
+from __future__ import print_function
+
 import os
 import os
 import subprocess
 import subprocess
 import sys
 import sys
@@ -42,14 +44,14 @@ def main(args):
   try:
   try:
     tool = FindDartFmtToolInChromiumTree()
     tool = FindDartFmtToolInChromiumTree()
   except NotFoundError, e:
   except NotFoundError, e:
-    print >> sys.stderr, e
+    print(e, file=sys.stderr)
     sys.exit(1)
     sys.exit(1)
 
 
   # Add some visibility to --help showing where the tool lives, since this
   # Add some visibility to --help showing where the tool lives, since this
   # redirection can be a little opaque.
   # redirection can be a little opaque.
   help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
   help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
   if any(match in args for match in help_syntax):
   if any(match in args for match in help_syntax):
-    print '\nDepot tools redirects you to the dartfmt at:\n    %s\n' % tool
+    print('\nDepot tools redirects you to the dartfmt at:\n    %s\n' % tool)
 
 
   return subprocess.call([tool] + sys.argv[1:])
   return subprocess.call([tool] + sys.argv[1:])
 
 

+ 10 - 8
depot-tools-auth.py

@@ -11,6 +11,8 @@ Usage:
   depot-tools-auth logout codereview.chromium.org
   depot-tools-auth logout codereview.chromium.org
 """
 """
 
 
+from __future__ import print_function
+
 import logging
 import logging
 import optparse
 import optparse
 import sys
 import sys
@@ -39,7 +41,7 @@ def CMDlogout(parser, args):
   """Revokes cached authentication token and removes it from disk."""
   """Revokes cached authentication token and removes it from disk."""
   _, authenticator = parser.parse_args(args)
   _, authenticator = parser.parse_args(args)
   done = authenticator.logout()
   done = authenticator.logout()
-  print 'Done.' if done else 'Already logged out.'
+  print('Done.' if done else 'Already logged out.')
   return 0
   return 0
 
 
 
 
@@ -54,12 +56,12 @@ def CMDinfo(parser, args):
 
 
 def print_token_info(hostname, authenticator):
 def print_token_info(hostname, authenticator):
   token_info = authenticator.get_token_info()
   token_info = authenticator.get_token_info()
-  print 'Logged in to %s as %s.' % (hostname, token_info['email'])
-  print ''
-  print 'To login with a different email run:'
-  print '  depot-tools-auth login %s' % hostname
-  print 'To logout and purge the authentication token run:'
-  print '  depot-tools-auth logout %s' % hostname
+  print('Logged in to %s as %s.' % (hostname, token_info['email']))
+  print('')
+  print('To login with a different email run:')
+  print('  depot-tools-auth login %s' % hostname)
+  print('To logout and purge the authentication token run:')
+  print('  depot-tools-auth logout %s' % hostname)
 
 
 
 
 class OptionParser(optparse.OptionParser):
 class OptionParser(optparse.OptionParser):
@@ -89,7 +91,7 @@ def main(argv):
   try:
   try:
     return dispatcher.execute(OptionParser(), argv)
     return dispatcher.execute(OptionParser(), argv)
   except auth.AuthenticationError as e:
   except auth.AuthenticationError as e:
-    print >> sys.stderr, e
+    print(e, file=sys.stderr)
     return 1
     return 1
 
 
 
 

+ 5 - 2
fix_encoding.py

@@ -6,6 +6,8 @@
 multiple platforms with python.
 multiple platforms with python.
 """
 """
 
 
+from __future__ import print_function
+
 import codecs
 import codecs
 import locale
 import locale
 import os
 import os
@@ -22,8 +24,9 @@ def complain(message):
   to our wrapper. So be paranoid about catching errors and reporting them
   to our wrapper. So be paranoid about catching errors and reporting them
   to sys.__stderr__, so that the user has a higher chance to see them.
   to sys.__stderr__, so that the user has a higher chance to see them.
   """
   """
-  print >> sys.__stderr__, (
-      isinstance(message, str) and message or repr(message))
+  print(
+      isinstance(message, str) and message or repr(message),
+      file=sys.__stderr__)
 
 
 
 
 def fix_default_encoding():
 def fix_default_encoding():

+ 2 - 0
gclient-new-workdir.py

@@ -7,6 +7,8 @@
 #    gclient-new-workdir.py [options] <repository> <new_workdir>
 #    gclient-new-workdir.py [options] <repository> <new_workdir>
 #
 #
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import os
 import os
 import shutil
 import shutil

+ 8 - 9
gclient.py

@@ -1650,12 +1650,11 @@ it or fix the checkout.
               # clean checkout.
               # clean checkout.
               gclient_scm.scm.GIT.CleanupDir(par_scm_root, rel_e_dir)
               gclient_scm.scm.GIT.CleanupDir(par_scm_root, rel_e_dir)
               assert not os.path.exists(os.path.join(e_dir, '.git'))
               assert not os.path.exists(os.path.join(e_dir, '.git'))
-              print(('\nWARNING: \'%s\' has been moved from DEPS to a higher '
-                     'level checkout. The git folder containing all the local'
-                     ' branches has been saved to %s.\n'
-                     'If you don\'t care about its state you can safely '
-                     'remove that folder to free up space.') %
-                    (entry, save_dir))
+              print('\nWARNING: \'%s\' has been moved from DEPS to a higher '
+                    'level checkout. The git folder containing all the local'
+                    ' branches has been saved to %s.\n'
+                    'If you don\'t care about its state you can safely '
+                    'remove that folder to free up space.' % (entry, save_dir))
               continue
               continue
 
 
         if scm_root in full_entries:
         if scm_root in full_entries:
@@ -1684,9 +1683,9 @@ it or fix the checkout.
                   should_recurse=False,
                   should_recurse=False,
                   relative=None,
                   relative=None,
                   condition=None))
                   condition=None))
-          print(('\nWARNING: \'%s\' is no longer part of this client.\n'
-                 'It is recommended that you manually remove it or use '
-                 '\'gclient sync -D\' next time.') % entry_fixed)
+          print('\nWARNING: \'%s\' is no longer part of this client.\n'
+                'It is recommended that you manually remove it or use '
+                '\'gclient sync -D\' next time.' % entry_fixed)
         else:
         else:
           # Delete the entry
           # Delete the entry
           print('\n________ deleting \'%s\' in \'%s\'' % (
           print('\n________ deleting \'%s\' in \'%s\'' % (

+ 8 - 6
gerrit_util.py

@@ -8,6 +8,8 @@ Utilities for requesting information for a gerrit server via https.
 https://gerrit-review.googlesource.com/Documentation/rest-api.html
 https://gerrit-review.googlesource.com/Documentation/rest-api.html
 """
 """
 
 
+from __future__ import print_function
+
 import base64
 import base64
 import contextlib
 import contextlib
 import cookielib
 import cookielib
@@ -166,10 +168,10 @@ class CookiesAuthenticator(Authenticator):
 
 
     st = os.stat(path)
     st = os.stat(path)
     if st.st_mode & (stat.S_IRWXG | stat.S_IRWXO):
     if st.st_mode & (stat.S_IRWXG | stat.S_IRWXO):
-      print >> sys.stderr, (
+      print(
           'WARNING: netrc file %s cannot be used because its file '
           'WARNING: netrc file %s cannot be used because its file '
           'permissions are insecure.  netrc file permissions should be '
           'permissions are insecure.  netrc file permissions should be '
-          '600.' % path)
+          '600.' % path, file=sys.stderr)
     with open(path) as fd:
     with open(path) as fd:
       content = fd.read()
       content = fd.read()
 
 
@@ -189,11 +191,11 @@ class CookiesAuthenticator(Authenticator):
     try:
     try:
       return netrc.netrc(path)
       return netrc.netrc(path)
     except IOError:
     except IOError:
-      print >> sys.stderr, 'WARNING: Could not read netrc file %s' % path
+      print('WARNING: Could not read netrc file %s' % path, file=sys.stderr)
       return netrc.netrc(os.devnull)
       return netrc.netrc(os.devnull)
     except netrc.NetrcParseError as e:
     except netrc.NetrcParseError as e:
-      print >> sys.stderr, ('ERROR: Cannot use netrc file %s due to a '
-                            'parsing error: %s' % (path, e))
+      print('ERROR: Cannot use netrc file %s due to a parsing error: %s' %
+          (path, e), file=sys.stderr)
       return netrc.netrc(os.devnull)
       return netrc.netrc(os.devnull)
 
 
   @classmethod
   @classmethod
@@ -786,7 +788,7 @@ def AddReviewers(host, change, reviewers=None, ccs=None, notify=True,
      'reviewer': r,
      'reviewer': r,
      'state': state,
      'state': state,
      'notify': 'NONE',  # We handled `notify` argument above.
      'notify': 'NONE',  # We handled `notify` argument above.
-   })
+    })
 
 
   conn = CreateHttpConn(host, path, reqtype='POST', body=body)
   conn = CreateHttpConn(host, path, reqtype='POST', body=body)
   # Gerrit will return 400 if one or more of the requested reviewers are
   # Gerrit will return 400 if one or more of the requested reviewers are

+ 2 - 2
git_cl.py

@@ -1998,14 +1998,14 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
       if gerrit_auth == git_auth:
       if gerrit_auth == git_auth:
         return
         return
       all_gsrc = cookie_auth.get_auth_header('d0esN0tEx1st.googlesource.com')
       all_gsrc = cookie_auth.get_auth_header('d0esN0tEx1st.googlesource.com')
-      print((
+      print(
           'WARNING: You have different credentials for Gerrit and git hosts:\n'
           'WARNING: You have different credentials for Gerrit and git hosts:\n'
           '           %s\n'
           '           %s\n'
           '           %s\n'
           '           %s\n'
           '        Consider running the following command:\n'
           '        Consider running the following command:\n'
           '          git cl creds-check\n'
           '          git cl creds-check\n'
           '        %s\n'
           '        %s\n'
-          '        %s') %
+          '        %s' %
           (git_host, self._gerrit_host,
           (git_host, self._gerrit_host,
            ('Hint: delete creds for .googlesource.com' if all_gsrc else ''),
            ('Hint: delete creds for .googlesource.com' if all_gsrc else ''),
            cookie_auth.get_new_password_message(git_host)))
            cookie_auth.get_new_password_message(git_host)))

+ 3 - 1
git_drover.py

@@ -4,6 +4,8 @@
 # found in the LICENSE file.
 # found in the LICENSE file.
 """git drover: A tool for merging changes to release branches."""
 """git drover: A tool for merging changes to release branches."""
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import cPickle
 import cPickle
 import functools
 import functools
@@ -452,7 +454,7 @@ def main():
                          options.parent_checkout, options.dry_run,
                          options.parent_checkout, options.dry_run,
                          options.verbose)
                          options.verbose)
   except Error as e:
   except Error as e:
-    print 'Error:', e.message
+    print('Error:', e.message)
     sys.exit(128)
     sys.exit(128)
 
 
 
 

+ 8 - 6
git_find_releases.py

@@ -11,6 +11,8 @@ Note that it uses the "cherry picked from" annotation to find merges, so it will
 only work on merges that followed the "use cherry-pick -x" instructions.
 only work on merges that followed the "use cherry-pick -x" instructions.
 """
 """
 
 
+from __future__ import print_function
+
 import optparse
 import optparse
 import re
 import re
 import sys
 import sys
@@ -43,16 +45,16 @@ def main():
   for arg in args:
   for arg in args:
     commit_name = GetNameForCommit(arg)
     commit_name = GetNameForCommit(arg)
     if not commit_name:
     if not commit_name:
-      print '%s not found' % arg
+      print('%s not found' % arg)
       return 1
       return 1
-    print 'commit %s was:' % arg
-    print '  initially in ' + commit_name
+    print('commit %s was:' % arg)
+    print('  initially in ' + commit_name)
     merges = GetMergesForCommit(arg)
     merges = GetMergesForCommit(arg)
     for merge in merges:
     for merge in merges:
-      print '  merged to ' + GetNameForCommit(merge) + ' (as ' + merge + ')'
+      print('  merged to ' + GetNameForCommit(merge) + ' (as ' + merge + ')')
     if not merges:
     if not merges:
-      print 'No merges found. If this seems wrong, be sure that you did:'
-      print '  git fetch origin && gclient sync --with_branch_heads'
+      print('No merges found. If this seems wrong, be sure that you did:')
+      print('  git fetch origin && gclient sync --with_branch_heads')
 
 
   return 0
   return 0
 
 

+ 7 - 5
git_footers.py

@@ -3,6 +3,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 # found in the LICENSE file.
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import json
 import json
 import re
 import re
@@ -229,23 +231,23 @@ def main(args):
 
 
   if opts.key:
   if opts.key:
     for v in footers.get(normalize_name(opts.key), []):
     for v in footers.get(normalize_name(opts.key), []):
-      print v
+      print(v)
   elif opts.position:
   elif opts.position:
     pos = get_position(footers)
     pos = get_position(footers)
-    print '%s@{#%s}' % (pos[0], pos[1] or '?')
+    print('%s@{#%s}' % (pos[0], pos[1] or '?'))
   elif opts.position_ref:
   elif opts.position_ref:
-    print get_position(footers)[0]
+    print(get_position(footers)[0])
   elif opts.position_num:
   elif opts.position_num:
     pos = get_position(footers)
     pos = get_position(footers)
     assert pos[1], 'No valid position for commit'
     assert pos[1], 'No valid position for commit'
-    print pos[1]
+    print(pos[1])
   elif opts.json:
   elif opts.json:
     with open(opts.json, 'w') as f:
     with open(opts.json, 'w') as f:
       json.dump(footers, f)
       json.dump(footers, f)
   else:
   else:
     for k in footers.keys():
     for k in footers.keys():
       for v in footers[k]:
       for v in footers[k]:
-        print '%s: %s' % (k, v)
+        print('%s: %s' % (k, v))
   return 0
   return 0
 
 
 
 

+ 3 - 1
git_freezer.py

@@ -3,6 +3,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 # found in the LICENSE file.
 
 
+from __future__ import print_function
+
 import sys
 import sys
 import optparse
 import optparse
 
 
@@ -26,7 +28,7 @@ def main(args):
   dispatcher = subcommand.CommandDispatcher(__name__)
   dispatcher = subcommand.CommandDispatcher(__name__)
   ret = dispatcher.execute(optparse.OptionParser(), args)
   ret = dispatcher.execute(optparse.OptionParser(), args)
   if ret:
   if ret:
-    print ret
+    print(ret)
   return 0
   return 0
 
 
 
 

+ 7 - 5
git_map_branches.py

@@ -24,6 +24,8 @@ Branches are colorized as follows:
     upstream, then you will see this.
     upstream, then you will see this.
 """
 """
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import collections
 import collections
 import os
 import os
@@ -185,8 +187,8 @@ class BranchMapper(object):
       parent = self.__branches_info[cycle[-1]].upstream
       parent = self.__branches_info[cycle[-1]].upstream
       cycle.append(parent)
       cycle.append(parent)
       if parent == branch:
       if parent == branch:
-        print >> sys.stderr, 'Warning: Detected cycle in branches: {}'.format(
-            ' -> '.join(cycle))
+        print('Warning: Detected cycle in branches: {}'.format(
+            ' -> '.join(cycle)), file=sys.stderr)
         return True
         return True
     return False
     return False
 
 
@@ -312,11 +314,11 @@ def print_desc():
 def main(argv):
 def main(argv):
   setup_color.init()
   setup_color.init()
   if get_git_version() < MIN_UPSTREAM_TRACK_GIT_VERSION:
   if get_git_version() < MIN_UPSTREAM_TRACK_GIT_VERSION:
-    print >> sys.stderr, (
+    print(
         'This tool will not show all tracking information for git version '
         'This tool will not show all tracking information for git version '
         'earlier than ' +
         'earlier than ' +
         '.'.join(str(x) for x in MIN_UPSTREAM_TRACK_GIT_VERSION) +
         '.'.join(str(x) for x in MIN_UPSTREAM_TRACK_GIT_VERSION) +
-        '. Please consider upgrading.')
+        '. Please consider upgrading.', file=sys.stderr)
 
 
   if '-h' in argv:
   if '-h' in argv:
     print_desc()
     print_desc()
@@ -342,7 +344,7 @@ def main(argv):
   mapper.maxjobs = opts.maxjobs
   mapper.maxjobs = opts.maxjobs
   mapper.show_subject = opts.show_subject
   mapper.show_subject = opts.show_subject
   mapper.start()
   mapper.start()
-  print mapper.output.as_formatted_string()
+  print(mapper.output.as_formatted_string())
   return 0
   return 0
 
 
 if __name__ == '__main__':
 if __name__ == '__main__':

+ 8 - 6
git_mark_merge_base.py

@@ -11,6 +11,8 @@ purposes of the chromium depot_tools git extensions. Passing no arguments will
 just print the effective merge base for the current branch.
 just print the effective merge base for the current branch.
 """
 """
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import sys
 import sys
 
 
@@ -39,16 +41,16 @@ def main(argv):
     try:
     try:
       remove_merge_base(cur)
       remove_merge_base(cur)
     except CalledProcessError:
     except CalledProcessError:
-      print 'No merge base currently exists for %s.' % cur
+      print('No merge base currently exists for %s.' % cur)
     return 0
     return 0
 
 
   if opts.merge_base:
   if opts.merge_base:
     try:
     try:
       opts.merge_base = hash_one(opts.merge_base)
       opts.merge_base = hash_one(opts.merge_base)
     except CalledProcessError:
     except CalledProcessError:
-      print >> sys.stderr, (
-          'fatal: could not resolve %s as a commit' % (opts.merge_base)
-      )
+      print(
+          'fatal: could not resolve %s as a commit' % opts.merge_base,
+          file=sys.stderr)
       return 1
       return 1
 
 
     manual_merge_base(cur, opts.merge_base, upstream(cur))
     manual_merge_base(cur, opts.merge_base, upstream(cur))
@@ -57,9 +59,9 @@ def main(argv):
   actual = get_or_create_merge_base(cur)
   actual = get_or_create_merge_base(cur)
   if opts.merge_base and opts.merge_base != actual:
   if opts.merge_base and opts.merge_base != actual:
     ret = 1
     ret = 1
-    print "Invalid merge_base %s" % opts.merge_base
+    print("Invalid merge_base %s" % opts.merge_base)
 
 
-  print "merge_base(%s): %s" % (cur, actual)
+  print("merge_base(%s): %s" % (cur, actual))
   return ret
   return ret
 
 
 
 

+ 7 - 5
git_nav_downstream.py

@@ -9,6 +9,8 @@ is more than one downstream branch, then this script will prompt you to select
 which branch.
 which branch.
 """
 """
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import sys
 import sys
 
 
@@ -36,24 +38,24 @@ def main(args):
     cur = hash_one(cur)
     cur = hash_one(cur)
   downstreams = [b for b in branches() if upfn(b) == cur]
   downstreams = [b for b in branches() if upfn(b) == cur]
   if not downstreams:
   if not downstreams:
-    print "No downstream branches"
+    print("No downstream branches")
     return 1
     return 1
   elif len(downstreams) == 1:
   elif len(downstreams) == 1:
     run('checkout', downstreams[0], stdout=sys.stdout, stderr=sys.stderr)
     run('checkout', downstreams[0], stdout=sys.stdout, stderr=sys.stderr)
   else:
   else:
     high = len(downstreams) - 1
     high = len(downstreams) - 1
     while True:
     while True:
-      print "Please select a downstream branch"
+      print("Please select a downstream branch")
       for i, b in enumerate(downstreams):
       for i, b in enumerate(downstreams):
-        print "  %d. %s" % (i, b)
+        print("  %d. %s" % (i, b))
       prompt = "Selection (0-%d)[0]: " % high
       prompt = "Selection (0-%d)[0]: " % high
       r = opts.pick
       r = opts.pick
       if r:
       if r:
-        print prompt + r
+        print(prompt + r)
       else:
       else:
         r = raw_input(prompt).strip() or '0'
         r = raw_input(prompt).strip() or '0'
       if not r.isdigit() or (0 > int(r) > high):
       if not r.isdigit() or (0 > int(r) > high):
-        print "Invalid choice."
+        print("Invalid choice.")
       else:
       else:
         run('checkout', downstreams[int(r)], stdout=sys.stdout,
         run('checkout', downstreams[int(r)], stdout=sys.stdout,
             stderr=sys.stderr)
             stderr=sys.stderr)

+ 3 - 1
git_number.py

@@ -21,6 +21,8 @@ commit's entire history, this script caches all calculated data inside the git
 repo that it operates on in the ref 'refs/number/commits'.
 repo that it operates on in the ref 'refs/number/commits'.
 """
 """
 
 
+from __future__ import print_function
+
 import binascii
 import binascii
 import collections
 import collections
 import logging
 import logging
@@ -278,7 +280,7 @@ def main():  # pragma: no cover
   if not opts.no_cache:
   if not opts.no_cache:
     finalize(targets)
     finalize(targets)
 
 
-  print '\n'.join(map(str, map(get_num, targets)))
+  print('\n'.join(map(str, map(get_num, targets))))
   return 0
   return 0
 
 
 
 

+ 38 - 43
git_rebase_update.py

@@ -7,6 +7,8 @@
 Tool to update all branches to have the latest changes from their upstreams.
 Tool to update all branches to have the latest changes from their upstreams.
 """
 """
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import collections
 import collections
 import logging
 import logging
@@ -76,7 +78,7 @@ def fetch_remotes(branch_tree):
   # TODO(iannucci): Should we fetch git-svn?
   # TODO(iannucci): Should we fetch git-svn?
 
 
   if not fetch_args:  # pragma: no cover
   if not fetch_args:  # pragma: no cover
-    print 'Nothing to fetch.'
+    print('Nothing to fetch.')
   else:
   else:
     git.run_with_stderr('fetch', *fetch_args, stdout=sys.stdout,
     git.run_with_stderr('fetch', *fetch_args, stdout=sys.stdout,
                         stderr=sys.stderr)
                         stderr=sys.stderr)
@@ -124,16 +126,16 @@ def remove_empty_branches(branch_tree):
     if parent in tag_set:
     if parent in tag_set:
       git.set_branch_config(branch, 'remote', '.')
       git.set_branch_config(branch, 'remote', '.')
       git.set_branch_config(branch, 'merge', 'refs/tags/%s' % parent)
       git.set_branch_config(branch, 'merge', 'refs/tags/%s' % parent)
-      print ('Reparented %s to track %s [tag] (was tracking %s)'
-             % (branch, parent, old_parent))
+      print('Reparented %s to track %s [tag] (was tracking %s)' %
+            (branch, parent, old_parent))
     else:
     else:
       git.run('branch', '--set-upstream-to', parent, branch)
       git.run('branch', '--set-upstream-to', parent, branch)
-      print ('Reparented %s to track %s (was tracking %s)'
-             % (branch, parent, old_parent))
+      print('Reparented %s to track %s (was tracking %s)' % (branch, parent,
+                                                             old_parent))
 
 
   # Apply all deletions recorded, in order.
   # Apply all deletions recorded, in order.
   for branch, _ in sorted(deletions.iteritems(), key=lambda x: x[1]):
   for branch, _ in sorted(deletions.iteritems(), key=lambda x: x[1]):
-    print git.run('branch', '-d', branch)
+    print(git.run('branch', '-d', branch))
 
 
 
 
 def rebase_branch(branch, parent, start_hash):
 def rebase_branch(branch, parent, start_hash):
@@ -156,11 +158,11 @@ def rebase_branch(branch, parent, start_hash):
 
 
   if git.hash_one(parent) != start_hash:
   if git.hash_one(parent) != start_hash:
     # Try a plain rebase first
     # Try a plain rebase first
-    print 'Rebasing:', branch
+    print('Rebasing:', branch)
     rebase_ret = git.rebase(parent, start_hash, branch, abort=True)
     rebase_ret = git.rebase(parent, start_hash, branch, abort=True)
     if not rebase_ret.success:
     if not rebase_ret.success:
       # TODO(iannucci): Find collapsible branches in a smarter way?
       # TODO(iannucci): Find collapsible branches in a smarter way?
-      print "Failed! Attempting to squash", branch, "...",
+      print("Failed! Attempting to squash", branch, "...", end=' ')
       sys.stdout.flush()
       sys.stdout.flush()
       squash_branch = branch+"_squash_attempt"
       squash_branch = branch+"_squash_attempt"
       git.run('checkout', '-b', squash_branch)
       git.run('checkout', '-b', squash_branch)
@@ -172,12 +174,12 @@ def rebase_branch(branch, parent, start_hash):
       git.run('checkout', branch)
       git.run('checkout', branch)
       git.run('branch', '-D', squash_branch)
       git.run('branch', '-D', squash_branch)
       if squash_ret.success and empty_rebase:
       if squash_ret.success and empty_rebase:
-        print 'Success!'
+        print('Success!')
         git.squash_current_branch(merge_base=start_hash)
         git.squash_current_branch(merge_base=start_hash)
         git.rebase(parent, start_hash, branch)
         git.rebase(parent, start_hash, branch)
       else:
       else:
-        print "Failed!"
-        print
+        print("Failed!")
+        print()
 
 
         # rebase and leave in mid-rebase state.
         # rebase and leave in mid-rebase state.
         # This second rebase attempt should always fail in the same
         # This second rebase attempt should always fail in the same
@@ -185,17 +187,16 @@ def rebase_branch(branch, parent, start_hash):
         # something very strange has happened.
         # something very strange has happened.
         second_rebase_ret = git.rebase(parent, start_hash, branch)
         second_rebase_ret = git.rebase(parent, start_hash, branch)
         if second_rebase_ret.success: # pragma: no cover
         if second_rebase_ret.success: # pragma: no cover
-          print "Second rebase succeeded unexpectedly!"
-          print "Please see: http://crbug.com/425696"
-          print "First rebased failed with:"
-          print rebase_ret.stderr
+          print("Second rebase succeeded unexpectedly!")
+          print("Please see: http://crbug.com/425696")
+          print("First rebased failed with:")
+          print(rebase_ret.stderr)
         else:
         else:
-          print "Here's what git-rebase (squashed) had to say:"
-          print
-          print squash_ret.stdout
-          print squash_ret.stderr
-          print textwrap.dedent(
-          """\
+          print("Here's what git-rebase (squashed) had to say:")
+          print()
+          print(squash_ret.stdout)
+          print(squash_ret.stderr)
+          print(textwrap.dedent("""\
           Squashing failed. You probably have a real merge conflict.
           Squashing failed. You probably have a real merge conflict.
 
 
           Your working copy is in mid-rebase. Either:
           Your working copy is in mid-rebase. Either:
@@ -204,10 +205,10 @@ def rebase_branch(branch, parent, start_hash):
                  git config branch.%s.dormant true
                  git config branch.%s.dormant true
 
 
           And then run `git rebase-update` again to resume.
           And then run `git rebase-update` again to resume.
-          """ % branch)
+          """ % branch))
           return False
           return False
   else:
   else:
-    print '%s up-to-date' % branch
+    print('%s up-to-date' % branch)
 
 
   git.remove_merge_base(branch)
   git.remove_merge_base(branch)
   git.get_or_create_merge_base(branch)
   git.get_or_create_merge_base(branch)
@@ -243,10 +244,8 @@ def main(args=None):
   if git.in_rebase():
   if git.in_rebase():
     # TODO(iannucci): Be able to resume rebase with flags like --continue,
     # TODO(iannucci): Be able to resume rebase with flags like --continue,
     # etc.
     # etc.
-    print (
-      'Rebase in progress. Please complete the rebase before running '
-      '`git rebase-update`.'
-    )
+    print('Rebase in progress. Please complete the rebase before running '
+          '`git rebase-update`.')
     return 1
     return 1
 
 
   return_branch, return_workdir = find_return_branch_workdir()
   return_branch, return_workdir = find_return_branch_workdir()
@@ -254,7 +253,7 @@ def main(args=None):
 
 
   if git.current_branch() == 'HEAD':
   if git.current_branch() == 'HEAD':
     if git.run('status', '--porcelain'):
     if git.run('status', '--porcelain'):
-      print 'Cannot rebase-update with detached head + uncommitted changes.'
+      print('Cannot rebase-update with detached head + uncommitted changes.')
       return 1
       return 1
   else:
   else:
     git.freeze()  # just in case there are any local changes.
     git.freeze()  # just in case there are any local changes.
@@ -267,7 +266,7 @@ def main(args=None):
   if branches_to_rebase:
   if branches_to_rebase:
     skipped = set(skipped).intersection(branches_to_rebase)
     skipped = set(skipped).intersection(branches_to_rebase)
   for branch in skipped:
   for branch in skipped:
-    print 'Skipping %s: No upstream specified' % branch
+    print('Skipping %s: No upstream specified' % branch)
 
 
   if not opts.no_fetch:
   if not opts.no_fetch:
     fetch_remotes(branch_tree)
     fetch_remotes(branch_tree)
@@ -288,28 +287,28 @@ def main(args=None):
     if branches_to_rebase and branch not in branches_to_rebase:
     if branches_to_rebase and branch not in branches_to_rebase:
       continue
       continue
     if git.is_dormant(branch):
     if git.is_dormant(branch):
-      print 'Skipping dormant branch', branch
+      print('Skipping dormant branch', branch)
     else:
     else:
       ret = rebase_branch(branch, parent, merge_base[branch])
       ret = rebase_branch(branch, parent, merge_base[branch])
       if not ret:
       if not ret:
         retcode = 1
         retcode = 1
 
 
         if opts.keep_going:
         if opts.keep_going:
-          print '--keep-going set, continuing with next branch.'
+          print('--keep-going set, continuing with next branch.')
           unrebased_branches.append(branch)
           unrebased_branches.append(branch)
           if git.in_rebase():
           if git.in_rebase():
             git.run_with_retcode('rebase', '--abort')
             git.run_with_retcode('rebase', '--abort')
           if git.in_rebase():  # pragma: no cover
           if git.in_rebase():  # pragma: no cover
-            print 'Failed to abort rebase. Something is really wrong.'
+            print('Failed to abort rebase. Something is really wrong.')
             break
             break
         else:
         else:
           break
           break
 
 
   if unrebased_branches:
   if unrebased_branches:
-    print
-    print 'The following branches could not be cleanly rebased:'
+    print()
+    print('The following branches could not be cleanly rebased:')
     for branch in unrebased_branches:
     for branch in unrebased_branches:
-      print '  %s' % branch
+      print('  %s' % branch)
 
 
   if not retcode:
   if not retcode:
     remove_empty_branches(branch_tree)
     remove_empty_branches(branch_tree)
@@ -321,10 +320,8 @@ def main(args=None):
     else:
     else:
       root_branch = git.root()
       root_branch = git.root()
       if return_branch != 'HEAD':
       if return_branch != 'HEAD':
-        print (
-          "%r was merged with its parent, checking out %r instead."
-          % (return_branch, root_branch)
-        )
+        print("%r was merged with its parent, checking out %r instead." %
+              (return_branch, root_branch))
       git.run('checkout', root_branch)
       git.run('checkout', root_branch)
 
 
     # return_workdir may also not be there any more.
     # return_workdir may also not be there any more.
@@ -332,10 +329,8 @@ def main(args=None):
       try:
       try:
         os.chdir(return_workdir)
         os.chdir(return_workdir)
       except OSError as e:
       except OSError as e:
-        print (
-          "Unable to return to original workdir %r: %s"
-          % (return_workdir, e)
-        )
+        print(
+            "Unable to return to original workdir %r: %s" % (return_workdir, e))
     git.set_config(STARTING_BRANCH_KEY, '')
     git.set_config(STARTING_BRANCH_KEY, '')
     git.set_config(STARTING_WORKDIR_KEY, '')
     git.set_config(STARTING_WORKDIR_KEY, '')
 
 

+ 8 - 6
git_reparent_branch.py

@@ -5,6 +5,8 @@
 
 
 """Change the upstream of the current branch."""
 """Change the upstream of the current branch."""
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import sys
 import sys
 
 
@@ -55,7 +57,7 @@ def main(args):
       "`git branch --set-upstream-to` to assign it one.\n\nPlease assign an "
       "`git branch --set-upstream-to` to assign it one.\n\nPlease assign an "
       "upstream branch and then run this command again."
       "upstream branch and then run this command again."
     )
     )
-    print >> sys.stderr, msg % branch
+    print(msg % branch, file=sys.stderr)
     return 1
     return 1
 
 
   mbase = get_or_create_merge_base(branch, cur_parent)
   mbase = get_or_create_merge_base(branch, cur_parent)
@@ -67,17 +69,17 @@ def main(args):
   try:
   try:
     run('show-ref', new_parent)
     run('show-ref', new_parent)
   except subprocess2.CalledProcessError:
   except subprocess2.CalledProcessError:
-    print >> sys.stderr, 'fatal: invalid reference: %s' % new_parent
+    print('fatal: invalid reference: %s' % new_parent, file=sys.stderr)
     return 1
     return 1
 
 
   if new_parent in all_tags:
   if new_parent in all_tags:
-    print ("Reparenting %s to track %s [tag] (was %s)"
-           % (branch, new_parent, cur_parent))
+    print("Reparenting %s to track %s [tag] (was %s)" % (branch, new_parent,
+                                                         cur_parent))
     set_branch_config(branch, 'remote', '.')
     set_branch_config(branch, 'remote', '.')
     set_branch_config(branch, 'merge', new_parent)
     set_branch_config(branch, 'merge', new_parent)
   else:
   else:
-    print ("Reparenting %s to track %s (was %s)"
-           % (branch, new_parent, cur_parent))
+    print("Reparenting %s to track %s (was %s)" % (branch, new_parent,
+                                                   cur_parent))
     run('branch', '--set-upstream-to', new_parent, branch)
     run('branch', '--set-upstream-to', new_parent, branch)
 
 
   manual_merge_base(branch, mbase, new_parent)
   manual_merge_base(branch, mbase, new_parent)

+ 4 - 2
git_upstream_diff.py

@@ -3,6 +3,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 # found in the LICENSE file.
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import sys
 import sys
 
 
@@ -31,12 +33,12 @@ def main(args):
     opts.branch = current_branch
     opts.branch = current_branch
 
 
   if not opts.branch or opts.branch == 'HEAD':
   if not opts.branch or opts.branch == 'HEAD':
-    print 'fatal: Cannot perform git-upstream-diff while not on a branch'
+    print('fatal: Cannot perform git-upstream-diff while not on a branch')
     return 1
     return 1
 
 
   par = git.upstream(opts.branch)
   par = git.upstream(opts.branch)
   if not par:
   if not par:
-    print 'fatal: No upstream configured for branch \'%s\'' % opts.branch
+    print('fatal: No upstream configured for branch \'%s\'' % opts.branch)
     return 1
     return 1
 
 
   cmd = [git.GIT_EXE, '-c', 'core.quotePath=false',
   cmd = [git.GIT_EXE, '-c', 'core.quotePath=false',

+ 6 - 4
gn.py

@@ -12,6 +12,8 @@ binary. It will also automatically try to find the gn binary when run inside
 the chrome source tree, so users can just type "gn" on the command line
 the chrome source tree, so users can just type "gn" on the command line
 (normally depot_tools is on the path)."""
 (normally depot_tools is on the path)."""
 
 
+from __future__ import print_function
+
 import gclient_paths
 import gclient_paths
 import os
 import os
 import subprocess
 import subprocess
@@ -56,13 +58,13 @@ def main(args):
   # inside of buildtools.
   # inside of buildtools.
   bin_path = gclient_paths.GetBuildtoolsPlatformBinaryPath()
   bin_path = gclient_paths.GetBuildtoolsPlatformBinaryPath()
   if not bin_path:
   if not bin_path:
-    print >> sys.stderr, ('gn.py: Could not find checkout in any parent of '
-                          'the current path.\nThis must be run inside a '
-                          'checkout.')
+    print('gn.py: Could not find checkout in any parent of the current path.\n'
+          'This must be run inside a checkout.', file=sys.stderr)
     return 1
     return 1
   gn_path = os.path.join(bin_path, 'gn' + gclient_paths.GetExeSuffix())
   gn_path = os.path.join(bin_path, 'gn' + gclient_paths.GetExeSuffix())
   if not os.path.exists(gn_path):
   if not os.path.exists(gn_path):
-    print >> sys.stderr, 'gn.py: Could not find gn executable at: %s' % gn_path
+    print(
+        'gn.py: Could not find gn executable at: %s' % gn_path, file=sys.stderr)
     return 2
     return 2
   else:
   else:
     return subprocess.call([gn_path] + args[1:])
     return subprocess.call([gn_path] + args[1:])

+ 3 - 1
man/src/filter_demo_output.py

@@ -3,6 +3,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 # found in the LICENSE file.
 
 
+from __future__ import print_function
+
 import collections
 import collections
 import os
 import os
 import re
 import re
@@ -13,7 +15,7 @@ from xml.sax.saxutils import escape
 from cStringIO import StringIO
 from cStringIO import StringIO
 
 
 if not os.path.exists('ansi2html'):
 if not os.path.exists('ansi2html'):
-  print 'You must run ./make_docs.sh once before running this script.'
+  print('You must run ./make_docs.sh once before running this script.')
   sys.exit(1)
   sys.exit(1)
 
 
 # This dependency is pulled in by make_docs.sh
 # This dependency is pulled in by make_docs.sh

+ 13 - 11
my_activity.py

@@ -32,6 +32,8 @@ Example:
 # >
 # >
 # [VPYTHON:END]
 # [VPYTHON:END]
 
 
+from __future__ import print_function
+
 import collections
 import collections
 import contextlib
 import contextlib
 from datetime import datetime
 from datetime import datetime
@@ -566,8 +568,8 @@ class MyActivity(object):
     })
     })
 
 
   def print_heading(self, heading):
   def print_heading(self, heading):
-    print
-    print self.options.output_format_heading.format(heading=heading)
+    print()
+    print(self.options.output_format_heading.format(heading=heading))
 
 
   def match(self, author):
   def match(self, author):
     if '@' in self.user:
     if '@' in self.user:
@@ -654,8 +656,8 @@ class MyActivity(object):
     }
     }
     if optional_values is not None:
     if optional_values is not None:
       values.update(optional_values)
       values.update(optional_values)
-    print DefaultFormatter().format(output_format, **values).encode(
-        sys.getdefaultencoding())
+    print(DefaultFormatter().format(output_format,
+                                    **values).encode(sys.getdefaultencoding()))
 
 
 
 
   def filter_issue(self, issue, should_filter_by_user=True):
   def filter_issue(self, issue, should_filter_by_user=True):
@@ -802,25 +804,25 @@ class MyActivity(object):
       if changes_by_issue_uid[issue_uid] or not skip_empty_own:
       if changes_by_issue_uid[issue_uid] or not skip_empty_own:
         self.print_issue(issues[issue_uid])
         self.print_issue(issues[issue_uid])
       if changes_by_issue_uid[issue_uid]:
       if changes_by_issue_uid[issue_uid]:
-        print
+        print()
         for change in changes_by_issue_uid[issue_uid]:
         for change in changes_by_issue_uid[issue_uid]:
-          print '   ',  # this prints one space due to comma, but no newline
+          print('    ', end='')  # this prints no newline
           self.print_change(change)
           self.print_change(change)
-        print
+        print()
 
 
     # Changes referencing others' issues.
     # Changes referencing others' issues.
     for issue_uid in ref_issues:
     for issue_uid in ref_issues:
       assert changes_by_ref_issue_uid[issue_uid]
       assert changes_by_ref_issue_uid[issue_uid]
       self.print_issue(ref_issues[issue_uid])
       self.print_issue(ref_issues[issue_uid])
       for change in changes_by_ref_issue_uid[issue_uid]:
       for change in changes_by_ref_issue_uid[issue_uid]:
-        print '',  # this prints one space due to comma, but no newline
+        print('', end=' ')  # this prints one space due to comma, but no newline
         self.print_change(change)
         self.print_change(change)
 
 
     # Changes referencing no issues.
     # Changes referencing no issues.
     if changes_without_issue:
     if changes_without_issue:
-      print self.options.output_format_no_url.format(title='Other changes')
+      print(self.options.output_format_no_url.format(title='Other changes'))
       for change in changes_without_issue:
       for change in changes_without_issue:
-        print '',  # this prints one space due to comma, but no newline
+        print('', end=' ')  # this prints one space due to comma, but no newline
         self.print_change(change)
         self.print_change(change)
 
 
   def print_activity(self):
   def print_activity(self):
@@ -855,7 +857,7 @@ class MyActivity(object):
       'changes': format_for_json_dump(self.changes),
       'changes': format_for_json_dump(self.changes),
       'issues': format_for_json_dump(self.issues)
       'issues': format_for_json_dump(self.issues)
     }
     }
-    print json.dumps(output, indent=2, cls=PythonObjectEncoder)
+    print(json.dumps(output, indent=2, cls=PythonObjectEncoder))
 
 
 
 
 def main():
 def main():

+ 34 - 29
my_reviews.py

@@ -8,6 +8,9 @@
 Example:
 Example:
   - my_reviews.py -r me@chromium.org -Q  for stats for last quarter.
   - my_reviews.py -r me@chromium.org -Q  for stats for last quarter.
 """
 """
+
+from __future__ import print_function
+
 import datetime
 import datetime
 import math
 import math
 import optparse
 import optparse
@@ -22,7 +25,7 @@ try:
   import dateutil.parser
   import dateutil.parser
   from dateutil.relativedelta import relativedelta
   from dateutil.relativedelta import relativedelta
 except ImportError:
 except ImportError:
-  print 'python-dateutil package required'
+  print('python-dateutil package required')
   exit(1)
   exit(1)
 
 
 
 
@@ -214,13 +217,13 @@ def print_issue(issue, reviewer, stats):
     reviewed = ''
     reviewed = ''
 
 
   # More information is available, print issue.keys() to see them.
   # More information is available, print issue.keys() to see them.
-  print '%7d %10s %3s %14s %-15s  %s' % (
+  print('%7d %10s %3s %14s %-15s  %s' % (
       issue['issue'],
       issue['issue'],
       issue['created'][:10],
       issue['created'][:10],
       reviewed,
       reviewed,
       latency,
       latency,
       issue['owner_email'],
       issue['owner_email'],
-      ', '.join(sorted(issue['reviewers'])))
+      ', '.join(sorted(issue['reviewers']))))
 
 
 
 
 def print_reviews(
 def print_reviews(
@@ -232,8 +235,9 @@ def print_reviews(
   stats = Stats()
   stats = Stats()
 
 
   # Column sizes need to match print_issue() output.
   # Column sizes need to match print_issue() output.
-  print >> sys.stderr, (
-      'Issue   Creation   Did         Latency Owner           Reviewers')
+  print(
+      'Issue   Creation   Did         Latency Owner           Reviewers',
+      file=sys.stderr)
 
 
   # See def search() in rietveld.py to see all the filters you can use.
   # See def search() in rietveld.py to see all the filters you can use.
   issues = []
   issues = []
@@ -253,39 +257,40 @@ def print_reviews(
     last_day = issues[-1]['created'][:10]
     last_day = issues[-1]['created'][:10]
   stats.finalize(first_day, last_day)
   stats.finalize(first_day, last_day)
 
 
-  print >> sys.stderr, (
+  print(
       '%s reviewed %d issues out of %d (%1.1f%%). %d were self-review.' %
       '%s reviewed %d issues out of %d (%1.1f%%). %d were self-review.' %
       (reviewer, stats.actually_reviewed, stats.total, stats.percent_done,
       (reviewer, stats.actually_reviewed, stats.total, stats.percent_done,
-        stats.self_review))
-  print >> sys.stderr, (
-      '%4.1f review request/day during %3d days   (%4.1f r/d done).' % (
-      stats.review_per_day, stats.days, stats.review_done_per_day))
-  print >> sys.stderr, (
-      '%4d were drive-bys                       (%5.1f%% of reviews done).' % (
-        stats.drive_by, stats.percent_drive_by))
-  print >> sys.stderr, (
-      '%4d were requested over IM or irc        (%5.1f%% of reviews done).' % (
-        stats.not_requested, stats.percent_not_requested))
-  print >> sys.stderr, (
-      ('%4d issues LGTM\'d                        (%5.1f%% of reviews done),'
-       ' gave multiple LGTMs on %d issues.') % (
-      stats.lgtms, stats.percent_lgtm, stats.multiple_lgtms))
-  print >> sys.stderr, (
+       stats.self_review), file=sys.stderr)
+  print(
+      '%4.1f review request/day during %3d days   (%4.1f r/d done).' %
+      (stats.review_per_day, stats.days, stats.review_done_per_day),
+      file=sys.stderr)
+  print(
+      '%4d were drive-bys                       (%5.1f%% of reviews done).' %
+      (stats.drive_by, stats.percent_drive_by), file=sys.stderr)
+  print(
+      '%4d were requested over IM or irc        (%5.1f%% of reviews done).' %
+      (stats.not_requested, stats.percent_not_requested), file=sys.stderr)
+  print(
+      '%4d issues LGTM\'d                        (%5.1f%% of reviews done),'
+      ' gave multiple LGTMs on %d issues.' %
+      (stats.lgtms, stats.percent_lgtm, stats.multiple_lgtms), file=sys.stderr)
+  print(
       'Average latency from request to first comment is %s.' %
       'Average latency from request to first comment is %s.' %
-      to_time(stats.average_latency))
-  print >> sys.stderr, (
+      to_time(stats.average_latency), file=sys.stderr)
+  print(
       'Median latency from request to first comment is %s.' %
       'Median latency from request to first comment is %s.' %
-      to_time(stats.median_latency))
+      to_time(stats.median_latency), file=sys.stderr)
 
 
 
 
 def print_count(
 def print_count(
     reviewer, created_after, created_before, instance_url, auth_config):
     reviewer, created_after, created_before, instance_url, auth_config):
   remote = rietveld.Rietveld(instance_url, auth_config)
   remote = rietveld.Rietveld(instance_url, auth_config)
-  print len(list(remote.search(
+  print(len(list(remote.search(
       reviewer=reviewer,
       reviewer=reviewer,
       created_after=created_after,
       created_after=created_after,
       created_before=created_before,
       created_before=created_before,
-      keys_only=True)))
+      keys_only=True))))
 
 
 
 
 def get_previous_quarter(today):
 def get_previous_quarter(today):
@@ -354,12 +359,12 @@ def main():
   if options.reviewer is None:
   if options.reviewer is None:
     parser.error('$EMAIL_ADDRESS and $USER are not set, please use -r')
     parser.error('$EMAIL_ADDRESS and $USER are not set, please use -r')
 
 
-  print >> sys.stderr, 'Searching for reviews by %s' % options.reviewer
+  print('Searching for reviews by %s' % options.reviewer, file=sys.stderr)
   if options.last_quarter:
   if options.last_quarter:
     options.begin = begin
     options.begin = begin
     options.end = end
     options.end = end
-    print >> sys.stderr, 'Using range %s to %s' % (
-        options.begin, options.end)
+    print('Using range %s to %s' %
+        (options.begin, options.end), file=sys.stderr)
   else:
   else:
     if options.begin is None or options.end is None:
     if options.begin is None or options.end is None:
       parser.error('Please specify either --last_quarter or --begin and --end')
       parser.error('Please specify either --last_quarter or --begin and --end')

+ 4 - 2
ninjalog_uploader_wrapper.py

@@ -3,6 +3,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 # found in the LICENSE file.
 
 
+from __future__ import print_function
+
 import os
 import os
 import subprocess
 import subprocess
 import json
 import json
@@ -40,7 +42,7 @@ def SaveConfig(config):
 def ShowMessage(countdown):
 def ShowMessage(countdown):
     whitelisted = '\n'.join(['  * %s' % config for config in
     whitelisted = '\n'.join(['  * %s' % config for config in
                              ninjalog_uploader.WHITELISTED_CONFIGS])
                              ninjalog_uploader.WHITELISTED_CONFIGS])
-    print """
+    print("""
 Your ninjalog will be uploaded to build stats server. The uploaded log will be
 Your ninjalog will be uploaded to build stats server. The uploaded log will be
 used to analyze user side build performance.
 used to analyze user side build performance.
 
 
@@ -67,7 +69,7 @@ You can find a more detailed explanation in
 %s
 %s
 
 
 """ % (whitelisted, countdown, __file__, __file__,
 """ % (whitelisted, countdown, __file__, __file__,
-       os.path.abspath(os.path.join(THIS_DIR, "ninjalog.README.md")))
+       os.path.abspath(os.path.join(THIS_DIR, "ninjalog.README.md"))))
 
 
 
 
 def main():
 def main():

+ 3 - 1
owners_finder.py

@@ -4,6 +4,8 @@
 
 
 """Interactive tool for finding reviewers/owners for a change."""
 """Interactive tool for finding reviewers/owners for a change."""
 
 
+from __future__ import print_function
+
 import os
 import os
 import copy
 import copy
 import owners as owners_module
 import owners as owners_module
@@ -354,7 +356,7 @@ class OwnersFinder(object):
     return '  ' * self.indentation
     return '  ' * self.indentation
 
 
   def writeln(self, text=''):
   def writeln(self, text=''):
-    print self.print_indent() + text
+    print(self.print_indent() + text)
 
 
   def hr(self):
   def hr(self):
     self.writeln('=====================')
     self.writeln('=====================')

+ 17 - 15
post_build_ninja_summary.py

@@ -56,6 +56,8 @@ will have a weighted time that is the same or similar to its elapsed time. A
 compile that runs in parallel with 999 other compiles will have a weighted time
 compile that runs in parallel with 999 other compiles will have a weighted time
 that is tiny."""
 that is tiny."""
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import errno
 import errno
 import os
 import os
@@ -100,7 +102,7 @@ class Target:
         # Allow for modest floating-point errors
         # Allow for modest floating-point errors
         epsilon = 0.000002
         epsilon = 0.000002
         if (self.weighted_duration > self.Duration() + epsilon):
         if (self.weighted_duration > self.Duration() + epsilon):
-          print '%s > %s?' % (self.weighted_duration, self.Duration())
+          print('%s > %s?' % (self.weighted_duration, self.Duration()))
         assert(self.weighted_duration <= self.Duration() + epsilon)
         assert(self.weighted_duration <= self.Duration() + epsilon)
         return self.weighted_duration
         return self.weighted_duration
 
 
@@ -256,16 +258,16 @@ def SummarizeEntries(entries):
 
 
     # Warn if the sum of weighted times is off by more than half a second.
     # Warn if the sum of weighted times is off by more than half a second.
     if abs(length - weighted_total) > 500:
     if abs(length - weighted_total) > 500:
-      print 'Discrepancy!!! Length = %.3f, weighted total = %.3f' % (
-            length, weighted_total)
+      print('Discrepancy!!! Length = %.3f, weighted total = %.3f' % (
+            length, weighted_total))
 
 
     # Print the slowest build steps (by weighted time).
     # Print the slowest build steps (by weighted time).
-    print '    Longest build steps:'
+    print('    Longest build steps:')
     entries.sort(key=lambda x: x.WeightedDuration())
     entries.sort(key=lambda x: x.WeightedDuration())
     for target in entries[-long_count:]:
     for target in entries[-long_count:]:
-      print '      %8.1f weighted s to build %s (%.1f s CPU time)' % (
+      print('      %8.1f weighted s to build %s (%.1f s CPU time)' % (
             target.WeightedDuration(),
             target.WeightedDuration(),
-            target.DescribeTargets(), target.Duration())
+            target.DescribeTargets(), target.Duration()))
 
 
     # Sum up the time by file extension/type of the output file
     # Sum up the time by file extension/type of the output file
     count_by_ext = {}
     count_by_ext = {}
@@ -279,21 +281,21 @@ def SummarizeEntries(entries):
               0) + target.WeightedDuration()
               0) + target.WeightedDuration()
       count_by_ext[extension] = count_by_ext.get(extension, 0) + 1
       count_by_ext[extension] = count_by_ext.get(extension, 0) + 1
 
 
-    print '    Time by build-step type:'
+    print('    Time by build-step type:')
     # Copy to a list with extension name and total time swapped, to (time, ext)
     # Copy to a list with extension name and total time swapped, to (time, ext)
     weighted_time_by_ext_sorted = sorted((y, x) for (x, y) in
     weighted_time_by_ext_sorted = sorted((y, x) for (x, y) in
                                           weighted_time_by_ext.items())
                                           weighted_time_by_ext.items())
     # Print the slowest build target types (by weighted time):
     # Print the slowest build target types (by weighted time):
     for time, extension in weighted_time_by_ext_sorted[-long_ext_count:]:
     for time, extension in weighted_time_by_ext_sorted[-long_ext_count:]:
-        print ('      %8.1f s weighted time to generate %d %s files '
-               '(%1.1f s CPU time)') % (time, count_by_ext[extension],
-                                        extension, time_by_ext[extension])
+        print('      %8.1f s weighted time to generate %d %s files '
+               '(%1.1f s CPU time)' % (time, count_by_ext[extension],
+                                        extension, time_by_ext[extension]))
 
 
-    print '    %.1f s weighted time (%.1f s CPU time, %1.1fx parallelism)' % (
+    print('    %.1f s weighted time (%.1f s CPU time, %1.1fx parallelism)' % (
           length, total_cpu_time,
           length, total_cpu_time,
-          total_cpu_time * 1.0 / length)
-    print '    %d build steps completed, average of %1.2f/s' % (
-          len(entries), len(entries) / (length))
+          total_cpu_time * 1.0 / length))
+    print('    %d build steps completed, average of %1.2f/s' % (
+          len(entries), len(entries) / (length)))
 
 
 
 
 def main():
 def main():
@@ -314,7 +316,7 @@ def main():
         entries = ReadTargets(log, False)
         entries = ReadTargets(log, False)
         SummarizeEntries(entries)
         SummarizeEntries(entries)
     except IOError:
     except IOError:
-      print 'Log file %r not found, no build summary created.' % log_file
+      print('Log file %r not found, no build summary created.' % log_file)
       return errno.ENOENT
       return errno.ENOENT
 
 
 
 

+ 3 - 1
presubmit_canned_checks.py

@@ -4,6 +4,8 @@
 
 
 """Generic presubmit checks that can be reused by other presubmit checks."""
 """Generic presubmit checks that can be reused by other presubmit checks."""
 
 
+from __future__ import print_function
+
 import os as _os
 import os as _os
 _HERE = _os.path.dirname(_os.path.abspath(__file__))
 _HERE = _os.path.dirname(_os.path.abspath(__file__))
 
 
@@ -1111,7 +1113,7 @@ def PanProjectChecks(input_api, output_api,
     if snapshot_memory:
     if snapshot_memory:
       delta_ms = int(1000*(dt2 - snapshot_memory[0]))
       delta_ms = int(1000*(dt2 - snapshot_memory[0]))
       if delta_ms > 500:
       if delta_ms > 500:
-        print "  %s took a long time: %dms" % (snapshot_memory[1], delta_ms)
+        print("  %s took a long time: %dms" % (snapshot_memory[1], delta_ms))
     snapshot_memory[:] = (dt2, msg)
     snapshot_memory[:] = (dt2, msg)
 
 
   snapshot("checking owners files format")
   snapshot("checking owners files format")

+ 4 - 2
presubmit_support.py

@@ -6,6 +6,8 @@
 """Enables directory-specific presubmit checks to run at upload and/or commit.
 """Enables directory-specific presubmit checks to run at upload and/or commit.
 """
 """
 
 
+from __future__ import print_function
+
 __version__ = '1.8.0'
 __version__ = '1.8.0'
 
 
 # TODO(joi) Add caching where appropriate/needed. The API is designed to allow
 # TODO(joi) Add caching where appropriate/needed. The API is designed to allow
@@ -1715,8 +1717,8 @@ def main(argv=None):
           options.parallel)
           options.parallel)
     return not results.should_continue()
     return not results.should_continue()
   except PresubmitFailure, e:
   except PresubmitFailure, e:
-    print >> sys.stderr, e
-    print >> sys.stderr, 'Maybe your depot_tools is out of date?'
+    print(e, file=sys.stderr)
+    print('Maybe your depot_tools is out of date?', file=sys.stderr)
     return 2
     return 2
 
 
 
 

+ 3 - 1
rietveld.py

@@ -14,6 +14,8 @@ The following hypothesis are made:
   - A patch set cannot be modified
   - A patch set cannot be modified
 """
 """
 
 
+from __future__ import print_function
+
 import copy
 import copy
 import errno
 import errno
 import json
 import json
@@ -474,7 +476,7 @@ class Rietveld(object):
         # If reaching this line, loop again. Uses a small backoff.
         # If reaching this line, loop again. Uses a small backoff.
         time.sleep(min(10, 1+retry*2))
         time.sleep(min(10, 1+retry*2))
     except urllib2.HTTPError as e:
     except urllib2.HTTPError as e:
-      print 'Request to %s failed: %s' % (e.geturl(), e.read())
+      print('Request to %s failed: %s' % (e.geturl(), e.read()))
       raise
       raise
     finally:
     finally:
       upload.ErrorExit = old_error_exit
       upload.ErrorExit = old_error_exit

+ 2 - 0
roll_dep.py

@@ -9,6 +9,8 @@ Works only with git checkout and git dependencies.  Currently this
 script will always roll to the tip of to origin/master.
 script will always roll to the tip of to origin/master.
 """
 """
 
 
+from __future__ import print_function
+
 import argparse
 import argparse
 import collections
 import collections
 import gclient_eval
 import gclient_eval

+ 14 - 11
roll_dep_svn.py

@@ -19,6 +19,8 @@ $ git add DEPS
 $ git commit
 $ git commit
 """
 """
 
 
+from __future__ import print_function
+
 import ast
 import ast
 import optparse
 import optparse
 import os
 import os
@@ -134,8 +136,8 @@ def convert_svn_revision(dep_path, revision):
           try:
           try:
             svn_rev = int(line.split()[1].partition('@')[2])
             svn_rev = int(line.split()[1].partition('@')[2])
           except (IndexError, ValueError):
           except (IndexError, ValueError):
-            print >> sys.stderr, (
-                'WARNING: Could not parse svn revision out of "%s"' % line)
+            print('WARNING: Could not parse svn revision out of "%s"' % line,
+                file=sys.stderr)
             continue
             continue
           if not latest_svn_rev or int(svn_rev) > int(latest_svn_rev):
           if not latest_svn_rev or int(svn_rev) > int(latest_svn_rev):
             latest_svn_rev = svn_rev
             latest_svn_rev = svn_rev
@@ -357,15 +359,15 @@ def update_deps(deps_file, dep_path, dep_name, new_rev, comment):
           commit_msg = generate_commit_message(
           commit_msg = generate_commit_message(
               deps_locals['deps_os'][os_name.s], dep_path, dep_name, new_rev)
               deps_locals['deps_os'][os_name.s], dep_path, dep_name, new_rev)
   if not commit_msg:
   if not commit_msg:
-    print 'Could not find an entry in %s to update.' % deps_file
+    print('Could not find an entry in %s to update.' % deps_file)
     return 1
     return 1
 
 
-  print 'Pinning %s' % dep_name
-  print 'to revision %s' % new_rev
-  print 'in %s' % deps_file
+  print('Pinning %s' % dep_name)
+  print('to revision %s' % new_rev)
+  print('in %s' % deps_file)
   with open(deps_file, 'w') as fh:
   with open(deps_file, 'w') as fh:
     for line in deps_lines:
     for line in deps_lines:
-      print >> fh, line
+      print(line, file=fh)
   deps_file_dir = os.path.normpath(os.path.dirname(deps_file))
   deps_file_dir = os.path.normpath(os.path.dirname(deps_file))
   deps_file_root = Popen(
   deps_file_root = Popen(
       ['git', 'rev-parse', '--show-toplevel'],
       ['git', 'rev-parse', '--show-toplevel'],
@@ -396,7 +398,7 @@ def main(argv):
     # Only require the path to exist if the revision should be verified. A path
     # Only require the path to exist if the revision should be verified. A path
     # to e.g. os deps might not be checked out.
     # to e.g. os deps might not be checked out.
     if not os.path.isdir(dep_path):
     if not os.path.isdir(dep_path):
-      print >> sys.stderr, 'No such directory: %s' % arg_dep_path
+      print('No such directory: %s' % arg_dep_path, file=sys.stderr)
       return 1
       return 1
   if len(args) > 2:
   if len(args) > 2:
     deps_file = args[2]
     deps_file = args[2]
@@ -407,9 +409,9 @@ def main(argv):
   dep_name = posix_path(os.path.relpath(dep_path, gclient_root))
   dep_name = posix_path(os.path.relpath(dep_path, gclient_root))
   if options.no_verify_revision:
   if options.no_verify_revision:
     if not is_git_hash(revision):
     if not is_git_hash(revision):
-      print >> sys.stderr, (
+      print(
           'The passed revision %s must be a git hash when skipping revision '
           'The passed revision %s must be a git hash when skipping revision '
-          'verification.' % revision)
+          'verification.' % revision, file=sys.stderr)
       return 1
       return 1
     git_rev = revision
     git_rev = revision
     comment = None
     comment = None
@@ -417,7 +419,8 @@ def main(argv):
     git_rev, svn_rev = get_git_revision(dep_path, revision)
     git_rev, svn_rev = get_git_revision(dep_path, revision)
     comment = ('from svn revision %s' % svn_rev) if svn_rev else None
     comment = ('from svn revision %s' % svn_rev) if svn_rev else None
     if not git_rev:
     if not git_rev:
-      print >> sys.stderr, 'Could not find git revision matching %s.' % revision
+      print('Could not find git revision matching %s.' % revision,
+          file=sys.stderr)
       return 1
       return 1
   return update_deps(deps_file, dep_path, dep_name, git_rev, comment)
   return update_deps(deps_file, dep_path, dep_name, git_rev, comment)
 
 

+ 13 - 11
split_cl.py

@@ -5,6 +5,8 @@
 
 
 """Splits a branch into smaller branches and uploads CLs."""
 """Splits a branch into smaller branches and uploads CLs."""
 
 
+from __future__ import print_function
+
 import collections
 import collections
 import os
 import os
 import re
 import re
@@ -95,7 +97,7 @@ def UploadCl(refactor_branch, refactor_branch_upstream, directory, files,
   # Create a branch.
   # Create a branch.
   if not CreateBranchForDirectory(
   if not CreateBranchForDirectory(
       refactor_branch, directory, refactor_branch_upstream):
       refactor_branch, directory, refactor_branch_upstream):
-    print 'Skipping ' + directory + ' for which a branch already exists.'
+    print('Skipping ' + directory + ' for which a branch already exists.')
     return
     return
 
 
   # Checkout all changes to files in |files|.
   # Checkout all changes to files in |files|.
@@ -124,7 +126,7 @@ def UploadCl(refactor_branch, refactor_branch_upstream, directory, files,
     upload_args.append('--send-mail')
     upload_args.append('--send-mail')
   if enable_auto_submit:
   if enable_auto_submit:
     upload_args.append('--enable-auto-submit')
     upload_args.append('--enable-auto-submit')
-  print 'Uploading CL for ' + directory + '.'
+  print('Uploading CL for ' + directory + '.')
   cmd_upload(upload_args)
   cmd_upload(upload_args)
   if comment:
   if comment:
     changelist().AddComment(FormatDescriptionOrComment(comment, directory),
     changelist().AddComment(FormatDescriptionOrComment(comment, directory),
@@ -162,12 +164,12 @@ def PrintClInfo(cl_index, num_cls, directory, file_paths, description,
                                                  directory).splitlines()
                                                  directory).splitlines()
   indented_description = '\n'.join(['    ' + l for l in description_lines])
   indented_description = '\n'.join(['    ' + l for l in description_lines])
 
 
-  print 'CL {}/{}'.format(cl_index, num_cls)
-  print 'Path: {}'.format(directory)
-  print 'Reviewers: {}'.format(', '.join(reviewers))
-  print '\n' + indented_description + '\n'
-  print '\n'.join(file_paths)
-  print
+  print('CL {}/{}'.format(cl_index, num_cls))
+  print('Path: {}'.format(directory))
+  print('Reviewers: {}'.format(', '.join(reviewers)))
+  print('\n' + indented_description + '\n')
+  print('\n'.join(file_paths))
+  print()
 
 
 
 
 def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
 def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
@@ -197,7 +199,7 @@ def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
     files = change.AffectedFiles()
     files = change.AffectedFiles()
 
 
     if not files:
     if not files:
-      print 'Cannot split an empty CL.'
+      print('Cannot split an empty CL.')
       return 1
       return 1
 
 
     author = git.run('config', 'user.email').strip() or None
     author = git.run('config', 'user.email').strip() or None
@@ -216,12 +218,12 @@ def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
     print('Will split current branch (' + refactor_branch + ') into ' +
     print('Will split current branch (' + refactor_branch + ') into ' +
           str(num_cls) + ' CLs.\n')
           str(num_cls) + ' CLs.\n')
     if cq_dry_run and num_cls > CL_SPLIT_FORCE_LIMIT:
     if cq_dry_run and num_cls > CL_SPLIT_FORCE_LIMIT:
-      print (
+      print(
         'This will generate "%r" CLs. This many CLs can potentially generate'
         'This will generate "%r" CLs. This many CLs can potentially generate'
         ' too much load on the build infrastructure. Please email'
         ' too much load on the build infrastructure. Please email'
         ' infra-dev@chromium.org to ensure that this won\'t  break anything.'
         ' infra-dev@chromium.org to ensure that this won\'t  break anything.'
         ' The infra team reserves the right to cancel your jobs if they are'
         ' The infra team reserves the right to cancel your jobs if they are'
-        ' overloading the CQ.') % num_cls
+        ' overloading the CQ.' % num_cls)
       answer = raw_input('Proceed? (y/n):')
       answer = raw_input('Proceed? (y/n):')
       if answer.lower() != 'y':
       if answer.lower() != 'y':
         return 0
         return 0

+ 6 - 4
testing_support/coverage_utils.py

@@ -2,6 +2,8 @@
 # Use of this source code is governed by a BSD-style license that can be
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 # found in the LICENSE file.
 
 
+from __future__ import print_function
+
 import distutils.version
 import distutils.version
 import os
 import os
 import sys
 import sys
@@ -13,12 +15,12 @@ ROOT_PATH = os.path.abspath(os.path.join(
 
 
 
 
 def native_error(msg, version):
 def native_error(msg, version):
-  print textwrap.dedent("""\
+  print(textwrap.dedent("""\
   ERROR: Native python-coverage (version: %s) is required to be
   ERROR: Native python-coverage (version: %s) is required to be
   installed on your PYTHONPATH to run this test. Recommendation:
   installed on your PYTHONPATH to run this test. Recommendation:
      sudo apt-get install pip
      sudo apt-get install pip
      sudo pip install --upgrade coverage
      sudo pip install --upgrade coverage
-  %s""") % (version, msg)
+  %s""") % (version, msg))
   sys.exit(1)
   sys.exit(1)
 
 
 def covered_main(includes, require_native=None, required_percentage=100.0,
 def covered_main(includes, require_native=None, required_percentage=100.0,
@@ -56,7 +58,7 @@ def covered_main(includes, require_native=None, required_percentage=100.0,
       sys.path.insert(0, os.path.join(ROOT_PATH, 'third_party'))
       sys.path.insert(0, os.path.join(ROOT_PATH, 'third_party'))
       import coverage
       import coverage
     else:
     else:
-      print ("ERROR: python-coverage (%s) is required to be installed on your "
+      print("ERROR: python-coverage (%s) is required to be installed on your "
              "PYTHONPATH to run this test." % require_native)
              "PYTHONPATH to run this test." % require_native)
       sys.exit(1)
       sys.exit(1)
 
 
@@ -71,7 +73,7 @@ def covered_main(includes, require_native=None, required_percentage=100.0,
 
 
   COVERAGE.stop()
   COVERAGE.stop()
   if COVERAGE.report() < required_percentage:
   if COVERAGE.report() < required_percentage:
-    print 'FATAL: not at required %f%% coverage.' % required_percentage
+    print('FATAL: not at required %f%% coverage.' % required_percentage)
     retcode = 2
     retcode = 2
 
 
   return retcode
   return retcode

+ 4 - 2
testing_support/fake_repos.py

@@ -5,6 +5,8 @@
 
 
 """Generate fake repositories for testing."""
 """Generate fake repositories for testing."""
 
 
+from __future__ import print_function
+
 import atexit
 import atexit
 import datetime
 import datetime
 import errno
 import errno
@@ -931,7 +933,7 @@ class FakeReposTestBase(trial_dir.TestCase):
         result = result[1:]
         result = result[1:]
       # The exception trace makes it hard to read so dump it too.
       # The exception trace makes it hard to read so dump it too.
       if '\n' in result:
       if '\n' in result:
-        print result
+        print(result)
     self.assertEquals(expected, result, msg)
     self.assertEquals(expected, result, msg)
 
 
   def check(self, expected, results):
   def check(self, expected, results):
@@ -978,7 +980,7 @@ class FakeReposTestBase(trial_dir.TestCase):
 
 
 def main(argv):
 def main(argv):
   fake = FakeRepos()
   fake = FakeRepos()
-  print 'Using %s' % fake.root_dir
+  print('Using %s' % fake.root_dir)
   try:
   try:
     fake.set_up_git()
     fake.set_up_git()
     print('Fake setup, press enter to quit or Ctrl-C to keep the checkouts.')
     print('Fake setup, press enter to quit or Ctrl-C to keep the checkouts.')

+ 5 - 3
testing_support/gerrit-init.sh

@@ -72,6 +72,8 @@ curl --retry 30 --ssl-reqd -s $url | python <(cat <<EOF
 #
 #
 # ...and prints the name and md5sum of the corresponding *.war file.
 # ...and prints the name and md5sum of the corresponding *.war file.
 
 
+from __future__ import print_function
+
 import json
 import json
 import re
 import re
 import sys
 import sys
@@ -104,14 +106,14 @@ def _cmp(a, b):
 if requested_version:
 if requested_version:
   for info, version in items:
   for info, version in items:
     if version == requested_version:
     if version == requested_version:
-      print '"%s" "%s"' % (info['name'], info['md5Hash'])
+      print('"%s" "%s"' % (info['name'], info['md5Hash']))
       sys.exit(0)
       sys.exit(0)
-  print >> sys.stderr, 'No such Gerrit version: %s' % requested_version
+  print('No such Gerrit version: %s' % requested_version, file=sys.stderr)
   sys.exit(1)
   sys.exit(1)
 
 
 items.sort(cmp=_cmp)
 items.sort(cmp=_cmp)
 for x in items:
 for x in items:
-  print '"%s" "%s"' % (x[0]['name'], x[0]['md5Hash'])
+  print('"%s" "%s"' % (x[0]['name'], x[0]['md5Hash']))
   sys.exit(0)
   sys.exit(0)
 EOF
 EOF
 ) "$version" | xargs | while read name md5; do
 ) "$version" | xargs | while read name md5; do

+ 3 - 1
testing_support/gerrit_test_case.py

@@ -38,6 +38,8 @@ One gotcha: 'repo upload' will always attempt to use the ssh interface to talk
 to gerrit.
 to gerrit.
 """
 """
 
 
+from __future__ import print_function
+
 import collections
 import collections
 import errno
 import errno
 import netrc
 import netrc
@@ -363,7 +365,7 @@ class GerritTestCase(unittest.TestCase):
       # Announce that gerrit didn't shut down cleanly.
       # Announce that gerrit didn't shut down cleanly.
       msg = 'Test gerrit server (pid=%d) did not shut down cleanly.' % (
       msg = 'Test gerrit server (pid=%d) did not shut down cleanly.' % (
           gerrit_instance.gerrit_pid)
           gerrit_instance.gerrit_pid)
-      print >> sys.stderr, msg
+      print(msg, file=sys.stderr)
 
 
   @classmethod
   @classmethod
   def tearDownClass(cls):
   def tearDownClass(cls):

+ 3 - 1
testing_support/super_mox.py

@@ -4,6 +4,8 @@
 
 
 """Simplify unit tests based on pymox."""
 """Simplify unit tests based on pymox."""
 
 
+from __future__ import print_function
+
 import os
 import os
 import random
 import random
 import shutil
 import shutil
@@ -68,7 +70,7 @@ class TestCaseUtils(object):
     if actual_members != expected_members:
     if actual_members != expected_members:
       diff = ([i for i in actual_members if i not in expected_members] +
       diff = ([i for i in actual_members if i not in expected_members] +
               [i for i in expected_members if i not in actual_members])
               [i for i in expected_members if i not in actual_members])
-      print >> sys.stderr, diff
+      print(diff, file=sys.stderr)
     # pylint: disable=no-member
     # pylint: disable=no-member
     self.assertEqual(actual_members, expected_members)
     self.assertEqual(actual_members, expected_members)
 
 

+ 2 - 1
testing_support/trial_dir.py

@@ -2,6 +2,7 @@
 # Use of this source code is governed by a BSD-style license that can be
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
 # found in the LICENSE file.
 
 
+from __future__ import print_function
 
 
 import atexit
 import atexit
 import logging
 import logging
@@ -93,5 +94,5 @@ class TestCase(auto_stub.TestCase, TrialDirMixIn):
 if '-l' in sys.argv:
 if '-l' in sys.argv:
   # See SHOULD_LEAK definition in TrialDir for its purpose.
   # See SHOULD_LEAK definition in TrialDir for its purpose.
   TrialDir.SHOULD_LEAK = True
   TrialDir.SHOULD_LEAK = True
-  print 'Leaking!'
+  print('Leaking!')
   sys.argv.remove('-l')
   sys.argv.remove('-l')

+ 3 - 1
tests/checkout_test.py

@@ -5,6 +5,8 @@
 
 
 """Unit tests for checkout.py."""
 """Unit tests for checkout.py."""
 
 
+from __future__ import print_function
+
 import logging
 import logging
 import os
 import os
 import shutil
 import shutil
@@ -275,7 +277,7 @@ class GitBaseTest(BaseTest):
     return tree
     return tree
 
 
   def _test_prepare(self, co):
   def _test_prepare(self, co):
-    print co.prepare(None)
+    print(co.prepare(None))
 
 
 
 
 class GitCheckout(GitBaseTest):
 class GitCheckout(GitBaseTest):

+ 4 - 2
tests/download_from_google_storage_unittest.py

@@ -6,6 +6,8 @@
 
 
 """Unit tests for download_from_google_storage.py."""
 """Unit tests for download_from_google_storage.py."""
 
 
+from __future__ import print_function
+
 import optparse
 import optparse
 import os
 import os
 import Queue
 import Queue
@@ -77,11 +79,11 @@ class ChangedWorkingDirectory(object):
 
 
   def __enter__(self):
   def __enter__(self):
     self._old_cwd = os.getcwd()
     self._old_cwd = os.getcwd()
-    print "Enter directory = ", self._working_directory
+    print("Enter directory = ", self._working_directory)
     os.chdir(self._working_directory)
     os.chdir(self._working_directory)
 
 
   def __exit__(self, *_):
   def __exit__(self, *_):
-    print "Enter directory = ", self._old_cwd
+    print("Enter directory = ", self._old_cwd)
     os.chdir(self._old_cwd)
     os.chdir(self._old_cwd)
 
 
 
 

+ 8 - 6
tests/fix_encoding_test.py

@@ -6,6 +6,8 @@
 
 
 """Unit tests for fix_encoding.py."""
 """Unit tests for fix_encoding.py."""
 
 
+from __future__ import print_function
+
 import os
 import os
 import sys
 import sys
 import unittest
 import unittest
@@ -21,18 +23,18 @@ class FixEncodingTest(unittest.TestCase):
 
 
   def test_code_page(self):
   def test_code_page(self):
     # Make sure printing garbage won't throw.
     # Make sure printing garbage won't throw.
-    print self.text.encode() + '\xff'
-    print >> sys.stderr, self.text.encode() + '\xff'
+    print(self.text.encode() + '\xff')
+    print(self.text.encode() + '\xff', file=sys.stderr)
 
 
   def test_utf8(self):
   def test_utf8(self):
     # Make sure printing utf-8 works.
     # Make sure printing utf-8 works.
-    print self.text.encode('utf-8')
-    print >> sys.stderr, self.text.encode('utf-8')
+    print(self.text.encode('utf-8'))
+    print(self.text.encode('utf-8'), file=sys.stderr)
 
 
   def test_unicode(self):
   def test_unicode(self):
     # Make sure printing unicode works.
     # Make sure printing unicode works.
-    print self.text
-    print >> sys.stderr, self.text
+    print(self.text)
+    print(self.text, file=sys.stderr)
 
 
   def test_default_encoding(self):
   def test_default_encoding(self):
     self.assertEquals('utf-8', sys.getdefaultencoding())
     self.assertEquals('utf-8', sys.getdefaultencoding())

+ 1 - 1
tests/git_cl_test.py

@@ -1192,7 +1192,7 @@ class TestGitCl(TestCase):
           short_hostname=short_hostname,
           short_hostname=short_hostname,
           labels=labels)
           labels=labels)
     # Uncomment when debugging.
     # Uncomment when debugging.
-    # print '\n'.join(map(lambda x: '%2i: %s' % x, enumerate(self.calls)))
+    # print('\n'.join(map(lambda x: '%2i: %s' % x, enumerate(self.calls))))
     git_cl.main(['upload'] + upload_args)
     git_cl.main(['upload'] + upload_args)
 
 
   def test_gerrit_upload_without_change_id(self):
   def test_gerrit_upload_without_change_id(self):

+ 6 - 4
tests/git_common_test.py

@@ -5,6 +5,8 @@
 
 
 """Unit tests for git_common.py"""
 """Unit tests for git_common.py"""
 
 
+from __future__ import print_function
+
 import binascii
 import binascii
 import collections
 import collections
 import datetime
 import datetime
@@ -807,12 +809,12 @@ class GitFreezeThaw(git_test_utils.GitRepoReadWriteTestBase):
   def testAll(self):
   def testAll(self):
     def inner():
     def inner():
       with open('some/files/file2', 'a') as f2:
       with open('some/files/file2', 'a') as f2:
-        print >> f2, 'cool appended line'
+        print('cool appended line', file=f2)
       os.mkdir('some/other_files')
       os.mkdir('some/other_files')
       with open('some/other_files/subdir_file', 'w') as f3:
       with open('some/other_files/subdir_file', 'w') as f3:
-        print >> f3, 'new file!'
+        print('new file!', file=f3)
       with open('some/files/file5', 'w') as f5:
       with open('some/files/file5', 'w') as f5:
-        print >> f5, 'New file!1!one!'
+        print('New file!1!one!', file=f5)
 
 
       STATUS_1 = '\n'.join((
       STATUS_1 = '\n'.join((
         ' M some/files/file2',
         ' M some/files/file2',
@@ -829,7 +831,7 @@ class GitFreezeThaw(git_test_utils.GitRepoReadWriteTestBase):
 
 
       # Freeze group 2
       # Freeze group 2
       with open('some/files/file2', 'a') as f2:
       with open('some/files/file2', 'a') as f2:
-        print >> f2, 'new! appended line!'
+        print('new! appended line!', file=f2)
       self.assertEquals(self.repo.git('status', '--porcelain').stdout,
       self.assertEquals(self.repo.git('status', '--porcelain').stdout,
                         ' M some/files/file2\n')
                         ' M some/files/file2\n')
       self.assertIsNone(self.gc.freeze())
       self.assertIsNone(self.gc.freeze())

+ 5 - 3
tests/presubmit_unittest.py

@@ -184,9 +184,10 @@ class PresubmitUnittest(PresubmitTestsBase):
         'gerrit_util', 'git_footers', 'glob', 'inspect', 'itertools', 'json',
         'gerrit_util', 'git_footers', 'glob', 'inspect', 'itertools', 'json',
         'load_files', 'logging', 'main', 'marshal', 'multiprocessing',
         'load_files', 'logging', 'main', 'marshal', 'multiprocessing',
         'normpath', 'optparse', 'os', 'owners', 'owners_finder', 'pickle',
         'normpath', 'optparse', 'os', 'owners', 'owners_finder', 'pickle',
-        'presubmit_canned_checks', 'random', 're', 'scm', 'sigint_handler',
-        'signal', 'subprocess', 'sys', 'tempfile', 'threading', 'time',
-        'traceback', 'types', 'unittest', 'urllib2', 'urlparse', 'warn'
+        'presubmit_canned_checks', 'print_function', 'random', 're', 'scm',
+        'sigint_handler', 'signal', 'subprocess', 'sys', 'tempfile',
+        'threading', 'time', 'traceback', 'types', 'unittest', 'urllib2',
+        'urlparse', 'warn'
     ]
     ]
     # If this test fails, you should add the relevant test.
     # If this test fails, you should add the relevant test.
     self.compareMembers(presubmit, members)
     self.compareMembers(presubmit, members)
@@ -1670,6 +1671,7 @@ class CannedChecksUnittest(PresubmitTestsBase):
       'GetUnitTests', 'GetUnitTestsInDirectory', 'GetUnitTestsRecursively',
       'GetUnitTests', 'GetUnitTestsInDirectory', 'GetUnitTestsRecursively',
       'CheckCIPDManifest', 'CheckCIPDPackages', 'CheckCIPDClientDigests',
       'CheckCIPDManifest', 'CheckCIPDPackages', 'CheckCIPDClientDigests',
       'CheckChangedLUCIConfigs', 'CheckLucicfgGenOutput',
       'CheckChangedLUCIConfigs', 'CheckLucicfgGenOutput',
+      'print_function',
     ]
     ]
     # If this test fails, you should add the relevant test.
     # If this test fails, you should add the relevant test.
     self.compareMembers(presubmit_canned_checks, members)
     self.compareMembers(presubmit_canned_checks, members)

+ 1 - 1
third_party/upload.py

@@ -2553,7 +2553,7 @@ def main():
     os.environ['LC_ALL'] = 'C'
     os.environ['LC_ALL'] = 'C'
     RealMain(sys.argv)
     RealMain(sys.argv)
   except KeyboardInterrupt:
   except KeyboardInterrupt:
-    print
+    print()
     StatusUpdate("Interrupted.")
     StatusUpdate("Interrupted.")
     sys.exit(1)
     sys.exit(1)
   except auth.AuthenticationError as e:
   except auth.AuthenticationError as e:

+ 8 - 6
upload_to_google_storage.py

@@ -5,6 +5,8 @@
 
 
 """Uploads files to Google Storage content addressed."""
 """Uploads files to Google Storage content addressed."""
 
 
+from __future__ import print_function
+
 import hashlib
 import hashlib
 import optparse
 import optparse
 import os
 import os
@@ -170,7 +172,7 @@ def upload_to_google_storage(
       with open(filename + '.sha1', 'rb') as f:
       with open(filename + '.sha1', 'rb') as f:
         sha1_file = f.read(1024)
         sha1_file = f.read(1024)
       if not re.match('^([a-z0-9]{40})$', sha1_file):
       if not re.match('^([a-z0-9]{40})$', sha1_file):
-        print >> sys.stderr, 'Invalid sha1 hash file %s.sha1' % filename
+        print('Invalid sha1 hash file %s.sha1' % filename, file=sys.stderr)
         return 1
         return 1
       upload_queue.put((filename, sha1_file))
       upload_queue.put((filename, sha1_file))
       continue
       continue
@@ -191,19 +193,19 @@ def upload_to_google_storage(
   printer_thread.join()
   printer_thread.join()
 
 
   # Print timing information.
   # Print timing information.
-  print 'Hashing %s files took %1f seconds' % (
-      len(input_filenames), hashing_duration)
-  print 'Uploading took %1f seconds' % (time.time() - upload_timer)
+  print('Hashing %s files took %1f seconds' % (
+      len(input_filenames), hashing_duration))
+  print('Uploading took %1f seconds' % (time.time() - upload_timer))
 
 
   # See if we ran into any errors.
   # See if we ran into any errors.
   max_ret_code = 0
   max_ret_code = 0
   for ret_code, message in ret_codes.queue:
   for ret_code, message in ret_codes.queue:
     max_ret_code = max(ret_code, max_ret_code)
     max_ret_code = max(ret_code, max_ret_code)
     if message:
     if message:
-      print >> sys.stderr, message
+      print(message, file=sys.stderr)
 
 
   if not max_ret_code:
   if not max_ret_code:
-    print 'Success!'
+    print('Success!')
 
 
   return max_ret_code
   return max_ret_code
 
 

+ 5 - 3
watchlists.py

@@ -17,6 +17,8 @@ the watchers for files given on the command line. This is useful to verify
 changes to WATCHLISTS files.
 changes to WATCHLISTS files.
 """
 """
 
 
+from __future__ import print_function
+
 import logging
 import logging
 import os
 import os
 import re
 import re
@@ -126,12 +128,12 @@ class Watchlists(object):
 def main(argv):
 def main(argv):
   # Confirm that watchlists can be parsed and spew out the watchers
   # Confirm that watchlists can be parsed and spew out the watchers
   if len(argv) < 2:
   if len(argv) < 2:
-    print "Usage (from the base of repo):"
-    print "  %s [file-1] [file-2] ...." % argv[0]
+    print("Usage (from the base of repo):")
+    print("  %s [file-1] [file-2] ...." % argv[0])
     return 1
     return 1
   wl = Watchlists(os.getcwd())
   wl = Watchlists(os.getcwd())
   watchers = wl.GetWatchersForPaths(argv[1:])
   watchers = wl.GetWatchersForPaths(argv[1:])
-  print watchers
+  print(watchers)
 
 
 
 
 if __name__ == '__main__':
 if __name__ == '__main__':

+ 4 - 2
weekly

@@ -6,6 +6,8 @@
 """Display log of checkins of one particular developer since a particular
 """Display log of checkins of one particular developer since a particular
 date. Only works on git dependencies at the moment."""
 date. Only works on git dependencies at the moment."""
 
 
+from __future__ import print_function
+
 import gclient_utils
 import gclient_utils
 import optparse
 import optparse
 import os
 import os
@@ -25,8 +27,8 @@ def show_log(path, authors, since='1 week ago'):
                             stdout=subprocess.PIPE).communicate()[0].rstrip()
                             stdout=subprocess.PIPE).communicate()[0].rstrip()
 
 
   if len(status.splitlines()) > 0:
   if len(status.splitlines()) > 0:
-    print '---------- %s ----------' % path
-    print status
+    print('---------- %s ----------' % path)
+    print(status)
 
 
 
 
 def main():
 def main():

+ 38 - 36
win_toolchain/get_toolchain_if_necessary.py

@@ -26,6 +26,8 @@ future when a hypothetical VS2015 is released, the 2013 script will be
 maintained, and a new 2015 script would be added.
 maintained, and a new 2015 script would be added.
 """
 """
 
 
+from __future__ import print_function
+
 import hashlib
 import hashlib
 import json
 import json
 import optparse
 import optparse
@@ -52,12 +54,12 @@ elif sys.platform == "cygwin":
   try:
   try:
     import cygwinreg as winreg
     import cygwinreg as winreg
   except ImportError:
   except ImportError:
-    print ''
-    print 'CygWin does not natively support winreg but a replacement exists.'
-    print 'https://pypi.python.org/pypi/cygwinreg/'
-    print ''
-    print 'Try: easy_install cygwinreg'
-    print ''
+    print('')
+    print('CygWin does not natively support winreg but a replacement exists.')
+    print('https://pypi.python.org/pypi/cygwinreg/')
+    print('')
+    print('Try: easy_install cygwinreg')
+    print('')
     raise
     raise
 
 
 BASEDIR = os.path.dirname(os.path.abspath(__file__))
 BASEDIR = os.path.dirname(os.path.abspath(__file__))
@@ -144,25 +146,25 @@ def CalculateHash(root, expected_hash):
       timestamps_data_files.append(f[0])
       timestamps_data_files.append(f[0])
     missing_files = [f for f in timestamps_data_files if f not in file_list]
     missing_files = [f for f in timestamps_data_files if f not in file_list]
     if len(missing_files):
     if len(missing_files):
-      print ('%d files missing from the %s version of the toolchain:' %
+      print('%d files missing from the %s version of the toolchain:' %
              (len(missing_files), expected_hash))
              (len(missing_files), expected_hash))
       for f in missing_files[:10]:
       for f in missing_files[:10]:
-        print '\t%s' % f
+        print('\t%s' % f)
       if len(missing_files) > 10:
       if len(missing_files) > 10:
-        print '\t...'
+        print('\t...')
     extra_files = [f for f in file_list if f not in timestamps_data_files]
     extra_files = [f for f in file_list if f not in timestamps_data_files]
     if len(extra_files):
     if len(extra_files):
-      print ('%d extra files in the %s version of the toolchain:' %
+      print('%d extra files in the %s version of the toolchain:' %
              (len(extra_files), expected_hash))
              (len(extra_files), expected_hash))
       for f in extra_files[:10]:
       for f in extra_files[:10]:
-        print '\t%s' % f
+        print('\t%s' % f)
       if len(extra_files) > 10:
       if len(extra_files) > 10:
-        print '\t...'
+        print('\t...')
   if matches:
   if matches:
     return timestamps_data['sha1']
     return timestamps_data['sha1']
 
 
   # Make long hangs when updating the toolchain less mysterious.
   # Make long hangs when updating the toolchain less mysterious.
-  print 'Calculating hash of toolchain in %s. Please wait...' % full_root_path
+  print('Calculating hash of toolchain in %s. Please wait...' % full_root_path)
   sys.stdout.flush()
   sys.stdout.flush()
   digest = hashlib.sha1()
   digest = hashlib.sha1()
   for path in file_list:
   for path in file_list:
@@ -189,7 +191,7 @@ def CalculateToolchainHashes(root, remove_corrupt_toolchains):
   for d in dir_list:
   for d in dir_list:
     toolchain_hash = CalculateHash(root, d)
     toolchain_hash = CalculateHash(root, d)
     if toolchain_hash != d:
     if toolchain_hash != d:
-      print ('The hash of a version of the toolchain has an unexpected value ('
+      print('The hash of a version of the toolchain has an unexpected value ('
              '%s instead of %s)%s.' % (toolchain_hash, d,
              '%s instead of %s)%s.' % (toolchain_hash, d,
              ', removing it' if remove_corrupt_toolchains else ''))
              ', removing it' if remove_corrupt_toolchains else ''))
       if remove_corrupt_toolchains:
       if remove_corrupt_toolchains:
@@ -259,23 +261,23 @@ def RequestGsAuthentication():
   Googler. This allows much faster downloads, and pulling (old) toolchains
   Googler. This allows much faster downloads, and pulling (old) toolchains
   that match src/ revisions.
   that match src/ revisions.
   """
   """
-  print 'Access to gs://chrome-wintoolchain/ not configured.'
-  print '-----------------------------------------------------------------'
-  print
-  print 'You appear to be a Googler.'
-  print
-  print 'I\'m sorry for the hassle, but you need to do a one-time manual'
-  print 'authentication. Please run:'
-  print
-  print '    download_from_google_storage --config'
-  print
-  print 'and follow the instructions.'
-  print
-  print 'NOTE 1: Use your google.com credentials, not chromium.org.'
-  print 'NOTE 2: Enter 0 when asked for a "project-id".'
-  print
-  print '-----------------------------------------------------------------'
-  print
+  print('Access to gs://chrome-wintoolchain/ not configured.')
+  print('-----------------------------------------------------------------')
+  print()
+  print('You appear to be a Googler.')
+  print()
+  print('I\'m sorry for the hassle, but you need to do a one-time manual')
+  print('authentication. Please run:')
+  print()
+  print('    download_from_google_storage --config')
+  print()
+  print('and follow the instructions.')
+  print()
+  print('NOTE 1: Use your google.com credentials, not chromium.org.')
+  print('NOTE 2: Enter 0 when asked for a "project-id".')
+  print()
+  print('-----------------------------------------------------------------')
+  print()
   sys.stdout.flush()
   sys.stdout.flush()
   sys.exit(1)
   sys.exit(1)
 
 
@@ -289,7 +291,7 @@ def DelayBeforeRemoving(target_dir):
               '\rRemoving old toolchain in %ds... (Ctrl-C to cancel)' % i)
               '\rRemoving old toolchain in %ds... (Ctrl-C to cancel)' % i)
       sys.stdout.flush()
       sys.stdout.flush()
       time.sleep(1)
       time.sleep(1)
-    print
+    print()
 
 
 
 
 def DownloadUsingHttp(filename):
 def DownloadUsingHttp(filename):
@@ -396,7 +398,7 @@ def RemoveUnusedToolchains(root):
       os.remove(full_path)
       os.remove(full_path)
 
 
   for d in dirs_to_remove:
   for d in dirs_to_remove:
-    print ('Removing %s as it doesn\'t correspond to any known toolchain.' %
+    print('Removing %s as it doesn\'t correspond to any known toolchain.' %
            os.path.join(root, d))
            os.path.join(root, d))
     # Use the RemoveToolchain function to remove these directories as they might
     # Use the RemoveToolchain function to remove these directories as they might
     # contain an older version of the toolchain.
     # contain an older version of the toolchain.
@@ -408,7 +410,7 @@ def RemoveUnusedToolchains(root):
   for toolchain in valid_toolchains:
   for toolchain in valid_toolchains:
     toolchain_age_in_sec = time.time() - toolchain[0]
     toolchain_age_in_sec = time.time() - toolchain[0]
     if toolchain_age_in_sec > toolchain_expiration_time:
     if toolchain_age_in_sec > toolchain_expiration_time:
-      print ('Removing version %s of the Win toolchain as it hasn\'t been used'
+      print('Removing version %s of the Win toolchain as it hasn\'t been used'
              ' in the past %d days.' % (toolchain[1],
              ' in the past %d days.' % (toolchain[1],
                                         toolchain_age_in_sec / 60 / 60 / 24))
                                         toolchain_age_in_sec / 60 / 60 / 24))
       RemoveToolchain(root, toolchain[1], True)
       RemoveToolchain(root, toolchain[1], True)
@@ -569,10 +571,10 @@ def main():
   if got_new_toolchain:
   if got_new_toolchain:
     current_hashes = CalculateToolchainHashes(target_dir, False)
     current_hashes = CalculateToolchainHashes(target_dir, False)
     if desired_hash not in current_hashes:
     if desired_hash not in current_hashes:
-      print >> sys.stderr, (
+      print(
           'Got wrong hash after pulling a new toolchain. '
           'Got wrong hash after pulling a new toolchain. '
           'Wanted \'%s\', got one of \'%s\'.' % (
           'Wanted \'%s\', got one of \'%s\'.' % (
-              desired_hash, ', '.join(current_hashes)))
+              desired_hash, ', '.join(current_hashes)), file=sys.stderr)
       return 1
       return 1
     SaveTimestampsAndHash(target_dir, desired_hash)
     SaveTimestampsAndHash(target_dir, desired_hash)
 
 

+ 10 - 8
win_toolchain/package_from_installed.py

@@ -30,6 +30,8 @@ useful as the resulting zip can't be redistributed, and most will presumably
 have a Pro license anyway).
 have a Pro license anyway).
 """
 """
 
 
+from __future__ import print_function
+
 import collections
 import collections
 import glob
 import glob
 import json
 import json
@@ -409,14 +411,14 @@ def AddEnvSetup(files):
                 'win_sdk\\bin\\SetEnv.arm64.json'))
                 'win_sdk\\bin\\SetEnv.arm64.json'))
   vs_version_file = os.path.join(tempdir, 'VS_VERSION')
   vs_version_file = os.path.join(tempdir, 'VS_VERSION')
   with open(vs_version_file, 'wb') as version:
   with open(vs_version_file, 'wb') as version:
-    print >>version, VS_VERSION
+    print(VS_VERSION, file=version)
   files.append((vs_version_file, 'VS_VERSION'))
   files.append((vs_version_file, 'VS_VERSION'))
 
 
 
 
 def RenameToSha1(output):
 def RenameToSha1(output):
   """Determine the hash in the same way that the unzipper does to rename the
   """Determine the hash in the same way that the unzipper does to rename the
   # .zip file."""
   # .zip file."""
-  print 'Extracting to determine hash...'
+  print('Extracting to determine hash...')
   tempdir = tempfile.mkdtemp()
   tempdir = tempfile.mkdtemp()
   old_dir = os.getcwd()
   old_dir = os.getcwd()
   os.chdir(tempdir)
   os.chdir(tempdir)
@@ -424,13 +426,13 @@ def RenameToSha1(output):
   with zipfile.ZipFile(
   with zipfile.ZipFile(
       os.path.join(old_dir, output), 'r', zipfile.ZIP_DEFLATED, True) as zf:
       os.path.join(old_dir, output), 'r', zipfile.ZIP_DEFLATED, True) as zf:
     zf.extractall(rel_dir)
     zf.extractall(rel_dir)
-  print 'Hashing...'
+  print('Hashing...')
   sha1 = get_toolchain_if_necessary.CalculateHash(rel_dir, None)
   sha1 = get_toolchain_if_necessary.CalculateHash(rel_dir, None)
   os.chdir(old_dir)
   os.chdir(old_dir)
   shutil.rmtree(tempdir)
   shutil.rmtree(tempdir)
   final_name = sha1 + '.zip'
   final_name = sha1 + '.zip'
   os.rename(output, final_name)
   os.rename(output, final_name)
-  print 'Renamed %s to %s.' % (output, final_name)
+  print('Renamed %s to %s.' % (output, final_name))
 
 
 
 
 def main():
 def main():
@@ -454,7 +456,7 @@ def main():
     files = BuildRepackageFileList(options.repackage_dir)
     files = BuildRepackageFileList(options.repackage_dir)
   else:
   else:
     if len(args) != 1 or args[0] not in ('2015', '2017'):
     if len(args) != 1 or args[0] not in ('2015', '2017'):
-      print 'Must specify 2015 or 2017'
+      print('Must specify 2015 or 2017')
       parser.print_help();
       parser.print_help();
       return 1
       return 1
 
 
@@ -462,7 +464,7 @@ def main():
       if (not os.path.exists(os.path.join(options.override_dir, 'bin')) or
       if (not os.path.exists(os.path.join(options.override_dir, 'bin')) or
           not os.path.exists(os.path.join(options.override_dir, 'include')) or
           not os.path.exists(os.path.join(options.override_dir, 'include')) or
           not os.path.exists(os.path.join(options.override_dir, 'lib'))):
           not os.path.exists(os.path.join(options.override_dir, 'lib'))):
-        print 'Invalid override directory - must contain bin/include/lib dirs'
+        print('Invalid override directory - must contain bin/include/lib dirs')
         return 1
         return 1
 
 
     global VS_VERSION
     global VS_VERSION
@@ -478,14 +480,14 @@ def main():
     else:
     else:
       VC_TOOLS = 'VC'
       VC_TOOLS = 'VC'
 
 
-    print 'Building file list for VS %s Windows %s...' % (VS_VERSION, WIN_VERSION)
+    print('Building file list for VS %s Windows %s...' % (VS_VERSION, WIN_VERSION))
     files = BuildFileList(options.override_dir)
     files = BuildFileList(options.override_dir)
 
 
     AddEnvSetup(files)
     AddEnvSetup(files)
 
 
   if False:
   if False:
     for f in files:
     for f in files:
-      print f[0], '->', f[1]
+      print(f[0], '->', f[1])
     return 0
     return 0
 
 
   output = 'out.zip'
   output = 'out.zip'

+ 5 - 3
wtf

@@ -5,6 +5,8 @@
 
 
 """Display active git branches and code changes in a chromiumos workspace."""
 """Display active git branches and code changes in a chromiumos workspace."""
 
 
+from __future__ import print_function
+
 import gclient_utils
 import gclient_utils
 import os
 import os
 import re
 import re
@@ -38,7 +40,7 @@ def show_dir(full_name, relative_name, color):
     if lines_printed == 0:
     if lines_printed == 0:
       show_name()
       show_name()
     lines_printed += 1
     lines_printed += 1
-    print branch
+    print(branch)
 
 
   status = subprocess.Popen(['git', 'status'],
   status = subprocess.Popen(['git', 'status'],
                             cwd=full_name,
                             cwd=full_name,
@@ -48,8 +50,8 @@ def show_dir(full_name, relative_name, color):
     if lines_printed == 0:
     if lines_printed == 0:
       show_name()
       show_name()
     if lines_printed == 1:
     if lines_printed == 1:
-      print '---------------'
-    print status
+      print('---------------')
+    print(status)
 
 
 
 
 def main():
 def main():