Commit 80ee78e7 authored by Raul Tambre, committed by Commit Bot

Convert print statements to Python 3 style

Ran "2to3 -w -n -f print ./" and manually added imports.
Ran "^\s*print " and "\s+print " to find batch/shell scripts, comments and the like with embedded code, and updated them manually.
Also manually added imports to files, which used print as a function, but were missing the import.

The scripts still work with Python 2.
There are no intended behaviour changes.
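
For illustration, a minimal sketch of the conversion patterns applied throughout this change (2to3's -w writes files in place, -n skips backup files, and -f print runs only the print fixer; the literals below are made up, not taken from the diff):

    from __future__ import print_function  # keeps the converted code valid Python 2
    import sys

    print('hello', 42)                # was: print 'hello', 42
    print('error', file=sys.stderr)   # was: print >> sys.stderr, 'error'
    print('no newline', end=' ')      # was: print 'no newline',

    # Without the __future__ import, Python 2 would parse print('hello', 42)
    # as a print statement applied to a tuple and print ('hello', 42),
    # which is why the import was added to every converted file.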

Bug: 942522
Change-Id: Id777e4d4df4adcdfdab1b18bde89f235ef491b9f
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/1595684
Reviewed-by: Dirk Pranke <dpranke@chromium.org>
Commit-Queue: Dirk Pranke <dpranke@chromium.org>
Auto-Submit: Raul Tambre <raul@tambre.ee>
parent ad1c8b22
......@@ -12,6 +12,8 @@ To run `gclient sync --gclientfile=.gclient` and annotate got_v8_revision:
sync --gclientfile=.gclient`
"""
from __future__ import print_function
import contextlib
import json
import optparse
......@@ -31,7 +33,7 @@ def temp_filename(suffix='', prefix='tmp'):
try:
os.remove(output_file)
except OSError as e:
print 'Error cleaning up temp file %s: %s' % (output_file, e)
print('Error cleaning up temp file %s: %s' % (output_file, e))
def parse_got_revision(filename, revision_mapping):
......@@ -51,7 +53,7 @@ def parse_got_revision(filename, revision_mapping):
def emit_buildprops(got_revisions):
for prop, revision in got_revisions.iteritems():
print '@@@SET_BUILD_PROPERTY@%s@%s@@@' % (prop, json.dumps(revision))
print('@@@SET_BUILD_PROPERTY@%s@%s@@@' % (prop, json.dumps(revision)))
def main():
......
......@@ -4,6 +4,8 @@
"""Google OAuth2 related functions."""
from __future__ import print_function
import BaseHTTPServer
import collections
import datetime
......
......@@ -6,7 +6,7 @@
setlocal
REM Set unique build ID.
FOR /f "usebackq tokens=*" %%a in (`python -c "import uuid; print uuid.uuid4()"`) do set AUTONINJA_BUILD_ID=%%a
FOR /f "usebackq tokens=*" %%a in (`python -c "from __future__ import print_function; import uuid; print(uuid.uuid4())"`) do set AUTONINJA_BUILD_ID=%%a
REM If a build performance summary has been requested then also set NINJA_STATUS
REM to trigger more verbose status updates. In particular this makes it possible
......
......@@ -10,6 +10,8 @@ and safer, and avoids errors that can cause slow goma builds or swap-storms
on non-goma builds.
"""
from __future__ import print_function
import multiprocessing
import os
import re
......@@ -108,4 +110,4 @@ for i in range(len(args)):
if (i == 0 and sys.platform.startswith('win')) or ' ' in args[i]:
args[i] = '"%s"' % args[i].replace('"', '\\"')
print ' '.join(args)
print(' '.join(args))
......@@ -15,6 +15,8 @@ Usage:
Puts a build into buildbucket for my-builder on tryserver.chromium.linux.
"""
from __future__ import print_function
import argparse
import json
import urlparse
......@@ -153,9 +155,9 @@ def main(argv):
http.force_exception_to_status_code = True
if args.verbose:
print 'Request URL:', url
print 'Request method:', method
print 'Request body:', body
print('Request URL:', url)
print('Request method:', method)
print('Request body:', body)
response, content = http.request(
url,
......@@ -165,8 +167,8 @@ def main(argv):
)
if args.verbose:
print 'Response:', response
print 'Content:', content
print('Response:', response)
print('Content:', content)
try:
content_json = json.loads(content)
......@@ -177,7 +179,7 @@ def main(argv):
except (ValueError, TypeError, KeyError):
pass
else:
print 'Build: %s' % build_url
print('Build: %s' % build_url)
return response.status != 200
......
......@@ -7,6 +7,8 @@
Includes support only for git.
"""
from __future__ import print_function
import fnmatch
import logging
import os
......@@ -288,8 +290,8 @@ class GitCheckout(CheckoutBase):
for post in post_processors:
post(self, p)
if verbose:
print p.filename
print align_stdout(stdout)
print(p.filename)
print(align_stdout(stdout))
except OSError, e:
errors.append((p, '%s%s' % (align_stdout(stdout), e)))
except subprocess.CalledProcessError, e:
......@@ -307,9 +309,9 @@ class GitCheckout(CheckoutBase):
extra_files = sorted(set(found_files) - set(patches.filenames))
unpatched_files = sorted(set(patches.filenames) - set(found_files))
if extra_files:
print 'Found extra files: %r' % (extra_files,)
print('Found extra files: %r' % extra_files)
if unpatched_files:
print 'Found unpatched files: %r' % (unpatched_files,)
print('Found unpatched files: %r' % unpatched_files)
def commit(self, commit_message, user):
......
......@@ -14,6 +14,8 @@ This tool does a two things:
# TODO(hinoka,iannucci): Pre-pack infra tools in cipd package with vpython spec.
from __future__ import print_function
import argparse
import sys
import os
......@@ -102,20 +104,20 @@ def get_available_tools():
def usage():
infra_tools, cipd_tools = get_available_tools()
print """usage: cit.py <name of tool> [args for tool]
print("""usage: cit.py <name of tool> [args for tool]
Wrapper for maintaining and calling tools in:
"infra.git/run.py infra.tools.*"
"infra.git/cipd/*"
Available infra tools are:"""
Available infra tools are:""")
for tool in infra_tools:
print ' * %s' % tool
print(' * %s' % tool)
print """
Available cipd tools are:"""
print("""
Available cipd tools are:""")
for tool in cipd_tools:
print ' * %s' % tool
print(' * %s' % tool)
def run(args):
......@@ -137,7 +139,7 @@ def run(args):
elif os.path.isfile(cipd_file) and is_exe(cipd_file):
cmd = [cipd_file]
else:
print >>sys.stderr, 'Unknown tool "%s"' % tool_name
print('Unknown tool "%s"' % tool_name, file=sys.stderr)
return usage()
# Add the remaining arguments.
......
......@@ -9,6 +9,8 @@ clang-format binaries are pulled down from Google Cloud Storage whenever you
sync Chrome, to platform-specific locations. This script knows how to locate
those tools, assuming the script is invoked from inside a Chromium checkout."""
from __future__ import print_function
import gclient_paths
import os
import subprocess
......@@ -63,7 +65,8 @@ def main(args):
# redirection can be a little opaque.
help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
if any(match in args for match in help_syntax):
print '\nDepot tools redirects you to the clang-format at:\n %s\n' % tool
print(
'\nDepot tools redirects you to the clang-format at:\n %s\n' % tool)
return subprocess.call([tool] + args)
......
......@@ -14,6 +14,8 @@ See https://git-scm.com/docs/gitattributes ("Defining a custom merge
driver") for more details.
"""
from __future__ import print_function
import subprocess
import sys
......@@ -29,14 +31,14 @@ def main():
base, current, others, file_name_in_tree = sys.argv[1:5]
if file_name_in_tree == '%P':
print >>sys.stderr
print >>sys.stderr, 'ERROR: clang-format merge driver needs git 2.5+'
print(file=sys.stderr)
print('ERROR: clang-format merge driver needs git 2.5+', file=sys.stderr)
if sys.platform == 'darwin':
print >>sys.stderr, 'Upgrade to Xcode 7.2+'
print >>sys.stderr
print('Upgrade to Xcode 7.2+', file=sys.stderr)
print(file=sys.stderr)
return 1
print 'Running clang-format 3-way merge driver on ' + file_name_in_tree
print('Running clang-format 3-way merge driver on ' + file_name_in_tree)
try:
tool = clang_format.FindClangFormatToolInChromiumTree()
......@@ -56,8 +58,8 @@ def main():
with open(fpath, 'wb') as output_file:
output_file.write(output)
except clang_format.NotFoundError, e:
print e
print 'Failed to find clang-format. Falling-back on standard 3-way merge'
print(e)
print('Failed to find clang-format. Falling-back on standard 3-way merge')
return subprocess.call(['git', 'merge-file', '-Lcurrent', '-Lbase', '-Lother',
current, base, others])
......
......@@ -3,6 +3,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import argparse
import os
import subprocess
......@@ -47,7 +49,7 @@ def main():
abs_build_dir = os.path.join(src_dir, options.build_dir)
src_relpath = os.path.relpath(options.file_path, abs_build_dir)
print 'Building %s' % options.file_path
print('Building %s' % options.file_path)
ninja_exec = 'ninja'
carets = '^'
......
......@@ -10,6 +10,8 @@ dartfmt binaries are pulled down during gclient sync in the mojo repo.
This tool is named dart_format.py instead of dartfmt to parallel
clang_format.py, which is in this same repository."""
from __future__ import print_function
import os
import subprocess
import sys
......@@ -42,14 +44,14 @@ def main(args):
try:
tool = FindDartFmtToolInChromiumTree()
except NotFoundError, e:
print >> sys.stderr, e
print(e, file=sys.stderr)
sys.exit(1)
# Add some visibility to --help showing where the tool lives, since this
# redirection can be a little opaque.
help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
if any(match in args for match in help_syntax):
print '\nDepot tools redirects you to the dartfmt at:\n %s\n' % tool
print('\nDepot tools redirects you to the dartfmt at:\n %s\n' % tool)
return subprocess.call([tool] + sys.argv[1:])
......
......@@ -11,6 +11,8 @@ Usage:
depot-tools-auth logout codereview.chromium.org
"""
from __future__ import print_function
import logging
import optparse
import sys
......@@ -39,7 +41,7 @@ def CMDlogout(parser, args):
"""Revokes cached authentication token and removes it from disk."""
_, authenticator = parser.parse_args(args)
done = authenticator.logout()
print 'Done.' if done else 'Already logged out.'
print('Done.' if done else 'Already logged out.')
return 0
......@@ -54,12 +56,12 @@ def CMDinfo(parser, args):
def print_token_info(hostname, authenticator):
token_info = authenticator.get_token_info()
print 'Logged in to %s as %s.' % (hostname, token_info['email'])
print ''
print 'To login with a different email run:'
print ' depot-tools-auth login %s' % hostname
print 'To logout and purge the authentication token run:'
print ' depot-tools-auth logout %s' % hostname
print('Logged in to %s as %s.' % (hostname, token_info['email']))
print('')
print('To login with a different email run:')
print(' depot-tools-auth login %s' % hostname)
print('To logout and purge the authentication token run:')
print(' depot-tools-auth logout %s' % hostname)
class OptionParser(optparse.OptionParser):
......@@ -89,7 +91,7 @@ def main(argv):
try:
return dispatcher.execute(OptionParser(), argv)
except auth.AuthenticationError as e:
print >> sys.stderr, e
print(e, file=sys.stderr)
return 1
......
......@@ -6,6 +6,8 @@
multiple platforms with python.
"""
from __future__ import print_function
import codecs
import locale
import os
......@@ -22,8 +24,9 @@ def complain(message):
to our wrapper. So be paranoid about catching errors and reporting them
to sys.__stderr__, so that the user has a higher chance to see them.
"""
print >> sys.__stderr__, (
isinstance(message, str) and message or repr(message))
print(
isinstance(message, str) and message or repr(message),
file=sys.__stderr__)
def fix_default_encoding():
......
......@@ -7,6 +7,8 @@
# gclient-new-workdir.py [options] <repository> <new_workdir>
#
from __future__ import print_function
import argparse
import os
import shutil
......
......@@ -1650,12 +1650,11 @@ it or fix the checkout.
# clean checkout.
gclient_scm.scm.GIT.CleanupDir(par_scm_root, rel_e_dir)
assert not os.path.exists(os.path.join(e_dir, '.git'))
print(('\nWARNING: \'%s\' has been moved from DEPS to a higher '
'level checkout. The git folder containing all the local'
' branches has been saved to %s.\n'
'If you don\'t care about its state you can safely '
'remove that folder to free up space.') %
(entry, save_dir))
print('\nWARNING: \'%s\' has been moved from DEPS to a higher '
'level checkout. The git folder containing all the local'
' branches has been saved to %s.\n'
'If you don\'t care about its state you can safely '
'remove that folder to free up space.' % (entry, save_dir))
continue
if scm_root in full_entries:
......@@ -1684,9 +1683,9 @@ it or fix the checkout.
should_recurse=False,
relative=None,
condition=None))
print(('\nWARNING: \'%s\' is no longer part of this client.\n'
'It is recommended that you manually remove it or use '
'\'gclient sync -D\' next time.') % entry_fixed)
print('\nWARNING: \'%s\' is no longer part of this client.\n'
'It is recommended that you manually remove it or use '
'\'gclient sync -D\' next time.' % entry_fixed)
else:
# Delete the entry
print('\n________ deleting \'%s\' in \'%s\'' % (
......
......@@ -8,6 +8,8 @@ Utilities for requesting information for a gerrit server via https.
https://gerrit-review.googlesource.com/Documentation/rest-api.html
"""
from __future__ import print_function
import base64
import contextlib
import cookielib
......@@ -166,10 +168,10 @@ class CookiesAuthenticator(Authenticator):
st = os.stat(path)
if st.st_mode & (stat.S_IRWXG | stat.S_IRWXO):
print >> sys.stderr, (
print(
'WARNING: netrc file %s cannot be used because its file '
'permissions are insecure. netrc file permissions should be '
'600.' % path)
'600.' % path, file=sys.stderr)
with open(path) as fd:
content = fd.read()
......@@ -189,11 +191,11 @@ class CookiesAuthenticator(Authenticator):
try:
return netrc.netrc(path)
except IOError:
print >> sys.stderr, 'WARNING: Could not read netrc file %s' % path
print('WARNING: Could not read netrc file %s' % path, file=sys.stderr)
return netrc.netrc(os.devnull)
except netrc.NetrcParseError as e:
print >> sys.stderr, ('ERROR: Cannot use netrc file %s due to a '
'parsing error: %s' % (path, e))
print('ERROR: Cannot use netrc file %s due to a parsing error: %s' %
(path, e), file=sys.stderr)
return netrc.netrc(os.devnull)
@classmethod
......@@ -786,7 +788,7 @@ def AddReviewers(host, change, reviewers=None, ccs=None, notify=True,
'reviewer': r,
'state': state,
'notify': 'NONE', # We handled `notify` argument above.
})
})
conn = CreateHttpConn(host, path, reqtype='POST', body=body)
# Gerrit will return 400 if one or more of the requested reviewers are
......
......@@ -1998,14 +1998,14 @@ class _GerritChangelistImpl(_ChangelistCodereviewBase):
if gerrit_auth == git_auth:
return
all_gsrc = cookie_auth.get_auth_header('d0esN0tEx1st.googlesource.com')
print((
print(
'WARNING: You have different credentials for Gerrit and git hosts:\n'
' %s\n'
' %s\n'
' Consider running the following command:\n'
' git cl creds-check\n'
' %s\n'
' %s') %
' %s' %
(git_host, self._gerrit_host,
('Hint: delete creds for .googlesource.com' if all_gsrc else ''),
cookie_auth.get_new_password_message(git_host)))
......
......@@ -4,6 +4,8 @@
# found in the LICENSE file.
"""git drover: A tool for merging changes to release branches."""
from __future__ import print_function
import argparse
import cPickle
import functools
......@@ -452,7 +454,7 @@ def main():
options.parent_checkout, options.dry_run,
options.verbose)
except Error as e:
print 'Error:', e.message
print('Error:', e.message)
sys.exit(128)
......
......@@ -11,6 +11,8 @@ Note that it uses the "cherry picked from" annotation to find merges, so it will
only work on merges that followed the "use cherry-pick -x" instructions.
"""
from __future__ import print_function
import optparse
import re
import sys
......@@ -43,16 +45,16 @@ def main():
for arg in args:
commit_name = GetNameForCommit(arg)
if not commit_name:
print '%s not found' % arg
print('%s not found' % arg)
return 1
print 'commit %s was:' % arg
print ' initially in ' + commit_name
print('commit %s was:' % arg)
print(' initially in ' + commit_name)
merges = GetMergesForCommit(arg)
for merge in merges:
print ' merged to ' + GetNameForCommit(merge) + ' (as ' + merge + ')'
print(' merged to ' + GetNameForCommit(merge) + ' (as ' + merge + ')')
if not merges:
print 'No merges found. If this seems wrong, be sure that you did:'
print ' git fetch origin && gclient sync --with_branch_heads'
print('No merges found. If this seems wrong, be sure that you did:')
print(' git fetch origin && gclient sync --with_branch_heads')
return 0
......
......@@ -3,6 +3,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import argparse
import json
import re
......@@ -229,23 +231,23 @@ def main(args):
if opts.key:
for v in footers.get(normalize_name(opts.key), []):
print v
print(v)
elif opts.position:
pos = get_position(footers)
print '%s@{#%s}' % (pos[0], pos[1] or '?')
print('%s@{#%s}' % (pos[0], pos[1] or '?'))
elif opts.position_ref:
print get_position(footers)[0]
print(get_position(footers)[0])
elif opts.position_num:
pos = get_position(footers)
assert pos[1], 'No valid position for commit'
print pos[1]
print(pos[1])
elif opts.json:
with open(opts.json, 'w') as f:
json.dump(footers, f)
else:
for k in footers.keys():
for v in footers[k]:
print '%s: %s' % (k, v)
print('%s: %s' % (k, v))
return 0
......
......@@ -3,6 +3,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import sys
import optparse
......@@ -26,7 +28,7 @@ def main(args):
dispatcher = subcommand.CommandDispatcher(__name__)
ret = dispatcher.execute(optparse.OptionParser(), args)
if ret:
print ret
print(ret)
return 0
......
......@@ -24,6 +24,8 @@ Branches are colorized as follows:
upstream, then you will see this.
"""
from __future__ import print_function
import argparse
import collections
import os
......@@ -185,8 +187,8 @@ class BranchMapper(object):
parent = self.__branches_info[cycle[-1]].upstream
cycle.append(parent)
if parent == branch:
print >> sys.stderr, 'Warning: Detected cycle in branches: {}'.format(
' -> '.join(cycle))
print('Warning: Detected cycle in branches: {}'.format(
' -> '.join(cycle)), file=sys.stderr)
return True
return False
......@@ -312,11 +314,11 @@ def print_desc():
def main(argv):
setup_color.init()
if get_git_version() < MIN_UPSTREAM_TRACK_GIT_VERSION:
print >> sys.stderr, (
print(
'This tool will not show all tracking information for git version '
'earlier than ' +
'.'.join(str(x) for x in MIN_UPSTREAM_TRACK_GIT_VERSION) +
'. Please consider upgrading.')
'. Please consider upgrading.', file=sys.stderr)
if '-h' in argv:
print_desc()
......@@ -342,7 +344,7 @@ def main(argv):
mapper.maxjobs = opts.maxjobs
mapper.show_subject = opts.show_subject
mapper.start()
print mapper.output.as_formatted_string()
print(mapper.output.as_formatted_string())
return 0
if __name__ == '__main__':
......
......@@ -11,6 +11,8 @@ purposes of the chromium depot_tools git extensions. Passing no arguments will
just print the effective merge base for the current branch.
"""
from __future__ import print_function
import argparse
import sys
......@@ -39,16 +41,16 @@ def main(argv):
try:
remove_merge_base(cur)
except CalledProcessError:
print 'No merge base currently exists for %s.' % cur
print('No merge base currently exists for %s.' % cur)
return 0
if opts.merge_base:
try:
opts.merge_base = hash_one(opts.merge_base)
except CalledProcessError:
print >> sys.stderr, (
'fatal: could not resolve %s as a commit' % (opts.merge_base)
)
print(
'fatal: could not resolve %s as a commit' % opts.merge_base,
file=sys.stderr)
return 1
manual_merge_base(cur, opts.merge_base, upstream(cur))
......@@ -57,9 +59,9 @@ def main(argv):
actual = get_or_create_merge_base(cur)
if opts.merge_base and opts.merge_base != actual:
ret = 1
print "Invalid merge_base %s" % opts.merge_base
print("Invalid merge_base %s" % opts.merge_base)
print "merge_base(%s): %s" % (cur, actual)
print("merge_base(%s): %s" % (cur, actual))
return ret
......
......@@ -9,6 +9,8 @@ is more than one downstream branch, then this script will prompt you to select
which branch.
"""
from __future__ import print_function
import argparse
import sys
......@@ -36,24 +38,24 @@ def main(args):
cur = hash_one(cur)
downstreams = [b for b in branches() if upfn(b) == cur]
if not downstreams:
print "No downstream branches"
print("No downstream branches")
return 1
elif len(downstreams) == 1:
run('checkout', downstreams[0], stdout=sys.stdout, stderr=sys.stderr)
else:
high = len(downstreams) - 1
while True:
print "Please select a downstream branch"
print("Please select a downstream branch")
for i, b in enumerate(downstreams):
print " %d. %s" % (i, b)
print(" %d. %s" % (i, b))
prompt = "Selection (0-%d)[0]: " % high
r = opts.pick
if r:
print prompt + r
print(prompt + r)
else:
r = raw_input(prompt).strip() or '0'
if not r.isdigit() or (0 > int(r) > high):
print "Invalid choice."
print("Invalid choice.")
else:
run('checkout', downstreams[int(r)], stdout=sys.stdout,
stderr=sys.stderr)
......
......@@ -21,6 +21,8 @@ commit's entire history, this script caches all calculated data inside the git
repo that it operates on in the ref 'refs/number/commits'.
"""
from __future__ import print_function
import binascii
import collections
import logging
......@@ -278,7 +280,7 @@ def main(): # pragma: no cover
if not opts.no_cache:
finalize(targets)
print '\n'.join(map(str, map(get_num, targets)))
print('\n'.join(map(str, map(get_num, targets))))
return 0
......
......@@ -7,6 +7,8 @@
Tool to update all branches to have the latest changes from their upstreams.
"""
from __future__ import print_function
import argparse
import collections
import logging
......@@ -76,7 +78,7 @@ def fetch_remotes(branch_tree):
# TODO(iannucci): Should we fetch git-svn?
if not fetch_args: # pragma: no cover
print 'Nothing to fetch.'
print('Nothing to fetch.')
else:
git.run_with_stderr('fetch', *fetch_args, stdout=sys.stdout,
stderr=sys.stderr)
......@@ -124,16 +126,16 @@ def remove_empty_branches(branch_tree):
if parent in tag_set:
git.set_branch_config(branch, 'remote', '.')
git.set_branch_config(branch, 'merge', 'refs/tags/%s' % parent)
print ('Reparented %s to track %s [tag] (was tracking %s)'
% (branch, parent, old_parent))
print('Reparented %s to track %s [tag] (was tracking %s)' %
(branch, parent, old_parent))
else:
git.run('branch', '--set-upstream-to', parent, branch)
print ('Reparented %s to track %s (was tracking %s)'
% (branch, parent, old_parent))
print('Reparented %s to track %s (was tracking %s)' % (branch, parent,
old_parent))
# Apply all deletions recorded, in order.
for branch, _ in sorted(deletions.iteritems(), key=lambda x: x[1]):
print git.run('branch', '-d', branch)
print(git.run('branch', '-d', branch))
def rebase_branch(branch, parent, start_hash):
......@@ -156,11 +158,11 @@ def rebase_branch(branch, parent, start_hash):
if git.hash_one(parent) != start_hash:
# Try a plain rebase first
print 'Rebasing:', branch
print('Rebasing:', branch)
rebase_ret = git.rebase(parent, start_hash, branch, abort=True)
if not rebase_ret.success:
# TODO(iannucci): Find collapsible branches in a smarter way?
print "Failed! Attempting to squash", branch, "...",
print("Failed! Attempting to squash", branch, "...", end=' ')
sys.stdout.flush()
squash_branch = branch+"_squash_attempt"
git.run('checkout', '-b', squash_branch)
......@@ -172,12 +174,12 @@ def rebase_branch(branch, parent, start_hash):
git.run('checkout', branch)
git.run('branch', '-D', squash_branch)
if squash_ret.success and empty_rebase:
print 'Success!'
print('Success!')
git.squash_current_branch(merge_base=start_hash)
git.rebase(parent, start_hash, branch)
else:
print "Failed!"
print
print("Failed!")
print()
# rebase and leave in mid-rebase state.
# This second rebase attempt should always fail in the same
......@@ -185,17 +187,16 @@ def rebase_branch(branch, parent, start_hash):
# something very strange has happened.
second_rebase_ret = git.rebase(parent, start_hash, branch)
if second_rebase_ret.success: # pragma: no cover
print "Second rebase succeeded unexpectedly!"
print "Please see: http://crbug.com/425696"
print "First rebased failed with:"
print rebase_ret.stderr
print("Second rebase succeeded unexpectedly!")
print("Please see: http://crbug.com/425696")
print("First rebased failed with:")
print(rebase_ret.stderr)
else:
print "Here's what git-rebase (squashed) had to say:"
print
print squash_ret.stdout
print squash_ret.stderr
print textwrap.dedent(
"""\
print("Here's what git-rebase (squashed) had to say:")
print()
print(squash_ret.stdout)
print(squash_ret.stderr)
print(textwrap.dedent("""\
Squashing failed. You probably have a real merge conflict.
Your working copy is in mid-rebase. Either:
......@@ -204,10 +205,10 @@ def rebase_branch(branch, parent, start_hash):
git config branch.%s.dormant true
And then run `git rebase-update` again to resume.
""" % branch)
""" % branch))
return False
else:
print '%s up-to-date' % branch
print('%s up-to-date' % branch)
git.remove_merge_base(branch)
git.get_or_create_merge_base(branch)
......@@ -243,10 +244,8 @@ def main(args=None):
if git.in_rebase():
# TODO(iannucci): Be able to resume rebase with flags like --continue,
# etc.
print (
'Rebase in progress. Please complete the rebase before running '
'`git rebase-update`.'
)
print('Rebase in progress. Please complete the rebase before running '
'`git rebase-update`.')
return 1
return_branch, return_workdir = find_return_branch_workdir()
......@@ -254,7 +253,7 @@ def main(args=None):
if git.current_branch() == 'HEAD':
if git.run('status', '--porcelain'):
print 'Cannot rebase-update with detached head + uncommitted changes.'
print('Cannot rebase-update with detached head + uncommitted changes.')
return 1
else:
git.freeze() # just in case there are any local changes.
......@@ -267,7 +266,7 @@ def main(args=None):
if branches_to_rebase:
skipped = set(skipped).intersection(branches_to_rebase)
for branch in skipped:
print 'Skipping %s: No upstream specified' % branch
print('Skipping %s: No upstream specified' % branch)
if not opts.no_fetch:
fetch_remotes(branch_tree)
......@@ -288,28 +287,28 @@ def main(args=None):
if branches_to_rebase and branch not in branches_to_rebase:
continue
if git.is_dormant(branch):
print 'Skipping dormant branch', branch
print('Skipping dormant branch', branch)
else:
ret = rebase_branch(branch, parent, merge_base[branch])
if not ret:
retcode = 1
if opts.keep_going:
print '--keep-going set, continuing with next branch.'
print('--keep-going set, continuing with next branch.')
unrebased_branches.append(branch)
if git.in_rebase():
git.run_with_retcode('rebase', '--abort')
if git.in_rebase(): # pragma: no cover
print 'Failed to abort rebase. Something is really wrong.'
print('Failed to abort rebase. Something is really wrong.')
break
else:
break
if unrebased_branches:
print
print 'The following branches could not be cleanly rebased:'
print()
print('The following branches could not be cleanly rebased:')
for branch in unrebased_branches:
print ' %s' % branch
print(' %s' % branch)
if not retcode:
remove_empty_branches(branch_tree)
......@@ -321,10 +320,8 @@ def main(args=None):
else:
root_branch = git.root()
if return_branch != 'HEAD':
print (
"%r was merged with its parent, checking out %r instead."
% (return_branch, root_branch)
)
print("%r was merged with its parent, checking out %r instead." %
(return_branch, root_branch))
git.run('checkout', root_branch)
# return_workdir may also not be there any more.
......@@ -332,10 +329,8 @@ def main(args=None):
try:
os.chdir(return_workdir)
except OSError as e:
print (
"Unable to return to original workdir %r: %s"
% (return_workdir, e)
)
print(
"Unable to return to original workdir %r: %s" % (return_workdir, e))
git.set_config(STARTING_BRANCH_KEY, '')
git.set_config(STARTING_WORKDIR_KEY, '')
......
......@@ -5,6 +5,8 @@
"""Change the upstream of the current branch."""
from __future__ import print_function
import argparse
import sys
......@@ -55,7 +57,7 @@ def main(args):
"`git branch --set-upstream-to` to assign it one.\n\nPlease assign an "
"upstream branch and then run this command again."
)
print >> sys.stderr, msg % branch
print(msg % branch, file=sys.stderr)
return 1
mbase = get_or_create_merge_base(branch, cur_parent)
......@@ -67,17 +69,17 @@ def main(args):
try:
run('show-ref', new_parent)
except subprocess2.CalledProcessError:
print >> sys.stderr, 'fatal: invalid reference: %s' % new_parent
print('fatal: invalid reference: %s' % new_parent, file=sys.stderr)
return 1
if new_parent in all_tags:
print ("Reparenting %s to track %s [tag] (was %s)"
% (branch, new_parent, cur_parent))
print("Reparenting %s to track %s [tag] (was %s)" % (branch, new_parent,
cur_parent))
set_branch_config(branch, 'remote', '.')
set_branch_config(branch, 'merge', new_parent)
else:
print ("Reparenting %s to track %s (was %s)"
% (branch, new_parent, cur_parent))
print("Reparenting %s to track %s (was %s)" % (branch, new_parent,
cur_parent))
run('branch', '--set-upstream-to', new_parent, branch)
manual_merge_base(branch, mbase, new_parent)
......
......@@ -3,6 +3,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import argparse
import sys
......@@ -31,12 +33,12 @@ def main(args):
opts.branch = current_branch
if not opts.branch or opts.branch == 'HEAD':
print 'fatal: Cannot perform git-upstream-diff while not on a branch'
print('fatal: Cannot perform git-upstream-diff while not on a branch')
return 1
par = git.upstream(opts.branch)
if not par:
print 'fatal: No upstream configured for branch \'%s\'' % opts.branch
print('fatal: No upstream configured for branch \'%s\'' % opts.branch)
return 1
cmd = [git.GIT_EXE, '-c', 'core.quotePath=false',
......
......@@ -12,6 +12,8 @@ binary. It will also automatically try to find the gn binary when run inside
the chrome source tree, so users can just type "gn" on the command line
(normally depot_tools is on the path)."""
from __future__ import print_function
import gclient_paths
import os
import subprocess
......@@ -56,13 +58,13 @@ def main(args):
# inside of buildtools.
bin_path = gclient_paths.GetBuildtoolsPlatformBinaryPath()
if not bin_path:
print >> sys.stderr, ('gn.py: Could not find checkout in any parent of '
'the current path.\nThis must be run inside a '
'checkout.')
print('gn.py: Could not find checkout in any parent of the current path.\n'
'This must be run inside a checkout.', file=sys.stderr)
return 1
gn_path = os.path.join(bin_path, 'gn' + gclient_paths.GetExeSuffix())
if not os.path.exists(gn_path):
print >> sys.stderr, 'gn.py: Could not find gn executable at: %s' % gn_path
print(
'gn.py: Could not find gn executable at: %s' % gn_path, file=sys.stderr)
return 2
else:
return subprocess.call([gn_path] + args[1:])
......
......@@ -3,6 +3,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import collections
import os
import re
......@@ -13,7 +15,7 @@ from xml.sax.saxutils import escape
from cStringIO import StringIO
if not os.path.exists('ansi2html'):
print 'You must run ./make_docs.sh once before running this script.'
print('You must run ./make_docs.sh once before running this script.')
sys.exit(1)
# This dependency is pulled in by make_docs.sh
......
......@@ -32,6 +32,8 @@ Example:
# >
# [VPYTHON:END]
from __future__ import print_function
import collections
import contextlib
from datetime import datetime
......@@ -566,8 +568,8 @@ class MyActivity(object):
})
def print_heading(self, heading):
print
print self.options.output_format_heading.format(heading=heading)
print()
print(self.options.output_format_heading.format(heading=heading))
def match(self, author):
if '@' in self.user:
......@@ -654,8 +656,8 @@ class MyActivity(object):
}
if optional_values is not None:
values.update(optional_values)
print DefaultFormatter().format(output_format, **values).encode(
sys.getdefaultencoding())
print(DefaultFormatter().format(output_format,
**values).encode(sys.getdefaultencoding()))
def filter_issue(self, issue, should_filter_by_user=True):
......@@ -802,25 +804,25 @@ class MyActivity(object):
if changes_by_issue_uid[issue_uid] or not skip_empty_own:
self.print_issue(issues[issue_uid])
if changes_by_issue_uid[issue_uid]:
print
print()
for change in changes_by_issue_uid[issue_uid]:
print ' ', # this prints one space due to comma, but no newline
print(' ', end='') # this prints no newline
self.print_change(change)
print
print()
# Changes referencing others' issues.
for issue_uid in ref_issues:
assert changes_by_ref_issue_uid[issue_uid]
self.print_issue(ref_issues[issue_uid])
for change in changes_by_ref_issue_uid[issue_uid]:
print '', # this prints one space due to comma, but no newline
print('', end=' ') # this prints one space due to comma, but no newline
self.print_change(change)
# Changes referencing no issues.
if changes_without_issue:
print self.options.output_format_no_url.format(title='Other changes')
print(self.options.output_format_no_url.format(title='Other changes'))
for change in changes_without_issue:
print '', # this prints one space due to comma, but no newline
print('', end=' ') # this prints one space due to comma, but no newline
self.print_change(change)
def print_activity(self):
......@@ -855,7 +857,7 @@ class MyActivity(object):
'changes': format_for_json_dump(self.changes),
'issues': format_for_json_dump(self.issues)
}
print json.dumps(output, indent=2, cls=PythonObjectEncoder)
print(json.dumps(output, indent=2, cls=PythonObjectEncoder))
def main():
......
......@@ -8,6 +8,9 @@
Example:
- my_reviews.py -r me@chromium.org -Q for stats for last quarter.
"""
from __future__ import print_function
import datetime
import math
import optparse
......@@ -22,7 +25,7 @@ try:
import dateutil.parser
from dateutil.relativedelta import relativedelta
except ImportError:
print 'python-dateutil package required'
print('python-dateutil package required')
exit(1)
......@@ -214,13 +217,13 @@ def print_issue(issue, reviewer, stats):
reviewed = ''
# More information is available, print issue.keys() to see them.
print '%7d %10s %3s %14s %-15s %s' % (
print('%7d %10s %3s %14s %-15s %s' % (
issue['issue'],
issue['created'][:10],
reviewed,
latency,
issue['owner_email'],
', '.join(sorted(issue['reviewers'])))
', '.join(sorted(issue['reviewers']))))
def print_reviews(
......@@ -232,8 +235,9 @@ def print_reviews(
stats = Stats()
# Column sizes need to match print_issue() output.
print >> sys.stderr, (
'Issue Creation Did Latency Owner Reviewers')
print(
'Issue Creation Did Latency Owner Reviewers',
file=sys.stderr)
# See def search() in rietveld.py to see all the filters you can use.
issues = []
......@@ -253,39 +257,40 @@ def print_reviews(
last_day = issues[-1]['created'][:10]
stats.finalize(first_day, last_day)
print >> sys.stderr, (
print(
'%s reviewed %d issues out of %d (%1.1f%%). %d were self-review.' %
(reviewer, stats.actually_reviewed, stats.total, stats.percent_done,
stats.self_review))
print >> sys.stderr, (
'%4.1f review request/day during %3d days (%4.1f r/d done).' % (
stats.review_per_day, stats.days, stats.review_done_per_day))
print >> sys.stderr, (
'%4d were drive-bys (%5.1f%% of reviews done).' % (
stats.drive_by, stats.percent_drive_by))
print >> sys.stderr, (
'%4d were requested over IM or irc (%5.1f%% of reviews done).' % (
stats.not_requested, stats.percent_not_requested))
print >> sys.stderr, (
('%4d issues LGTM\'d (%5.1f%% of reviews done),'
' gave multiple LGTMs on %d issues.') % (
stats.lgtms, stats.percent_lgtm, stats.multiple_lgtms))
print >> sys.stderr, (
stats.self_review), file=sys.stderr)
print(
'%4.1f review request/day during %3d days (%4.1f r/d done).' %
(stats.review_per_day, stats.days, stats.review_done_per_day),
file=sys.stderr)
print(
'%4d were drive-bys (%5.1f%% of reviews done).' %
(stats.drive_by, stats.percent_drive_by), file=sys.stderr)
print(
'%4d were requested over IM or irc (%5.1f%% of reviews done).' %
(stats.not_requested, stats.percent_not_requested), file=sys.stderr)
print(
'%4d issues LGTM\'d (%5.1f%% of reviews done),'
' gave multiple LGTMs on %d issues.' %
(stats.lgtms, stats.percent_lgtm, stats.multiple_lgtms), file=sys.stderr)
print(
'Average latency from request to first comment is %s.' %
to_time(stats.average_latency))
print >> sys.stderr, (
to_time(stats.average_latency), file=sys.stderr)
print(
'Median latency from request to first comment is %s.' %
to_time(stats.median_latency))
to_time(stats.median_latency), file=sys.stderr)
def print_count(
reviewer, created_after, created_before, instance_url, auth_config):
remote = rietveld.Rietveld(instance_url, auth_config)
print len(list(remote.search(
print(len(list(remote.search(
reviewer=reviewer,
created_after=created_after,
created_before=created_before,
keys_only=True)))
keys_only=True))))
def get_previous_quarter(today):
......@@ -354,12 +359,12 @@ def main():
if options.reviewer is None:
parser.error('$EMAIL_ADDRESS and $USER are not set, please use -r')
print >> sys.stderr, 'Searching for reviews by %s' % options.reviewer
print('Searching for reviews by %s' % options.reviewer, file=sys.stderr)
if options.last_quarter:
options.begin = begin
options.end = end
print >> sys.stderr, 'Using range %s to %s' % (
options.begin, options.end)
print('Using range %s to %s' %
(options.begin, options.end), file=sys.stderr)
else:
if options.begin is None or options.end is None:
parser.error('Please specify either --last_quarter or --begin and --end')
......
......@@ -3,6 +3,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import os
import subprocess
import json
......@@ -40,7 +42,7 @@ def SaveConfig(config):
def ShowMessage(countdown):
whitelisted = '\n'.join([' * %s' % config for config in
ninjalog_uploader.WHITELISTED_CONFIGS])
print """
print("""
Your ninjalog will be uploaded to build stats server. The uploaded log will be
used to analyze user side build performance.
......@@ -67,7 +69,7 @@ You can find a more detailed explanation in
%s
""" % (whitelisted, countdown, __file__, __file__,
os.path.abspath(os.path.join(THIS_DIR, "ninjalog.README.md")))
os.path.abspath(os.path.join(THIS_DIR, "ninjalog.README.md"))))
def main():
......
......@@ -4,6 +4,8 @@
"""Interactive tool for finding reviewers/owners for a change."""
from __future__ import print_function
import os
import copy
import owners as owners_module
......@@ -354,7 +356,7 @@ class OwnersFinder(object):
return ' ' * self.indentation
def writeln(self, text=''):
print self.print_indent() + text
print(self.print_indent() + text)
def hr(self):
self.writeln('=====================')
......
......@@ -56,6 +56,8 @@ will have a weighted time that is the same or similar to its elapsed time. A
compile that runs in parallel with 999 other compiles will have a weighted time
that is tiny."""
from __future__ import print_function
import argparse
import errno
import os
......@@ -100,7 +102,7 @@ class Target:
# Allow for modest floating-point errors
epsilon = 0.000002
if (self.weighted_duration > self.Duration() + epsilon):
print '%s > %s?' % (self.weighted_duration, self.Duration())
print('%s > %s?' % (self.weighted_duration, self.Duration()))
assert(self.weighted_duration <= self.Duration() + epsilon)
return self.weighted_duration
......@@ -256,16 +258,16 @@ def SummarizeEntries(entries):
# Warn if the sum of weighted times is off by more than half a second.
if abs(length - weighted_total) > 500:
print 'Discrepancy!!! Length = %.3f, weighted total = %.3f' % (
length, weighted_total)
print('Discrepancy!!! Length = %.3f, weighted total = %.3f' % (
length, weighted_total))
# Print the slowest build steps (by weighted time).
print ' Longest build steps:'
print(' Longest build steps:')
entries.sort(key=lambda x: x.WeightedDuration())
for target in entries[-long_count:]:
print ' %8.1f weighted s to build %s (%.1f s CPU time)' % (
print(' %8.1f weighted s to build %s (%.1f s CPU time)' % (
target.WeightedDuration(),
target.DescribeTargets(), target.Duration())
target.DescribeTargets(), target.Duration()))
# Sum up the time by file extension/type of the output file
count_by_ext = {}
......@@ -279,21 +281,21 @@ def SummarizeEntries(entries):
0) + target.WeightedDuration()
count_by_ext[extension] = count_by_ext.get(extension, 0) + 1
print ' Time by build-step type:'
print(' Time by build-step type:')
# Copy to a list with extension name and total time swapped, to (time, ext)
weighted_time_by_ext_sorted = sorted((y, x) for (x, y) in
weighted_time_by_ext.items())
# Print the slowest build target types (by weighted time):
for time, extension in weighted_time_by_ext_sorted[-long_ext_count:]:
print (' %8.1f s weighted time to generate %d %s files '
'(%1.1f s CPU time)') % (time, count_by_ext[extension],
extension, time_by_ext[extension])
print(' %8.1f s weighted time to generate %d %s files '
'(%1.1f s CPU time)' % (time, count_by_ext[extension],
extension, time_by_ext[extension]))
print ' %.1f s weighted time (%.1f s CPU time, %1.1fx parallelism)' % (
print(' %.1f s weighted time (%.1f s CPU time, %1.1fx parallelism)' % (
length, total_cpu_time,
total_cpu_time * 1.0 / length)
print ' %d build steps completed, average of %1.2f/s' % (
len(entries), len(entries) / (length))
total_cpu_time * 1.0 / length))
print(' %d build steps completed, average of %1.2f/s' % (
len(entries), len(entries) / (length)))
def main():
......@@ -314,7 +316,7 @@ def main():
entries = ReadTargets(log, False)
SummarizeEntries(entries)
except IOError:
print 'Log file %r not found, no build summary created.' % log_file
print('Log file %r not found, no build summary created.' % log_file)
return errno.ENOENT
......
......@@ -4,6 +4,8 @@
"""Generic presubmit checks that can be reused by other presubmit checks."""
from __future__ import print_function
import os as _os
_HERE = _os.path.dirname(_os.path.abspath(__file__))
......@@ -1111,7 +1113,7 @@ def PanProjectChecks(input_api, output_api,
if snapshot_memory:
delta_ms = int(1000*(dt2 - snapshot_memory[0]))
if delta_ms > 500:
print " %s took a long time: %dms" % (snapshot_memory[1], delta_ms)
print(" %s took a long time: %dms" % (snapshot_memory[1], delta_ms))
snapshot_memory[:] = (dt2, msg)
snapshot("checking owners files format")
......
......@@ -6,6 +6,8 @@
"""Enables directory-specific presubmit checks to run at upload and/or commit.
"""
from __future__ import print_function
__version__ = '1.8.0'
# TODO(joi) Add caching where appropriate/needed. The API is designed to allow
......@@ -1715,8 +1717,8 @@ def main(argv=None):
options.parallel)
return not results.should_continue()
except PresubmitFailure, e:
print >> sys.stderr, e
print >> sys.stderr, 'Maybe your depot_tools is out of date?'
print(e, file=sys.stderr)
print('Maybe your depot_tools is out of date?', file=sys.stderr)
return 2
......
......@@ -14,6 +14,8 @@ The following hypothesis are made:
- A patch set cannot be modified
"""
from __future__ import print_function
import copy
import errno
import json
......@@ -474,7 +476,7 @@ class Rietveld(object):
# If reaching this line, loop again. Uses a small backoff.
time.sleep(min(10, 1+retry*2))
except urllib2.HTTPError as e:
print 'Request to %s failed: %s' % (e.geturl(), e.read())
print('Request to %s failed: %s' % (e.geturl(), e.read()))
raise
finally:
upload.ErrorExit = old_error_exit
......
......@@ -9,6 +9,8 @@ Works only with git checkout and git dependencies. Currently this
script will always roll to the tip of to origin/master.
"""
from __future__ import print_function
import argparse
import collections
import gclient_eval
......
......@@ -19,6 +19,8 @@ $ git add DEPS
$ git commit
"""
from __future__ import print_function
import ast
import optparse
import os
......@@ -134,8 +136,8 @@ def convert_svn_revision(dep_path, revision):
try:
svn_rev = int(line.split()[1].partition('@')[2])
except (IndexError, ValueError):
print >> sys.stderr, (
'WARNING: Could not parse svn revision out of "%s"' % line)
print('WARNING: Could not parse svn revision out of "%s"' % line,
file=sys.stderr)
continue
if not latest_svn_rev or int(svn_rev) > int(latest_svn_rev):
latest_svn_rev = svn_rev
......@@ -357,15 +359,15 @@ def update_deps(deps_file, dep_path, dep_name, new_rev, comment):
commit_msg = generate_commit_message(
deps_locals['deps_os'][os_name.s], dep_path, dep_name, new_rev)
if not commit_msg:
print 'Could not find an entry in %s to update.' % deps_file
print('Could not find an entry in %s to update.' % deps_file)
return 1
print 'Pinning %s' % dep_name
print 'to revision %s' % new_rev
print 'in %s' % deps_file
print('Pinning %s' % dep_name)
print('to revision %s' % new_rev)
print('in %s' % deps_file)
with open(deps_file, 'w') as fh:
for line in deps_lines:
print >> fh, line
print(line, file=fh)
deps_file_dir = os.path.normpath(os.path.dirname(deps_file))
deps_file_root = Popen(
['git', 'rev-parse', '--show-toplevel'],
......@@ -396,7 +398,7 @@ def main(argv):
# Only require the path to exist if the revision should be verified. A path
# to e.g. os deps might not be checked out.
if not os.path.isdir(dep_path):
print >> sys.stderr, 'No such directory: %s' % arg_dep_path
print('No such directory: %s' % arg_dep_path, file=sys.stderr)
return 1
if len(args) > 2:
deps_file = args[2]
......@@ -407,9 +409,9 @@ def main(argv):
dep_name = posix_path(os.path.relpath(dep_path, gclient_root))
if options.no_verify_revision:
if not is_git_hash(revision):
print >> sys.stderr, (
print(
'The passed revision %s must be a git hash when skipping revision '
'verification.' % revision)
'verification.' % revision, file=sys.stderr)
return 1
git_rev = revision
comment = None
......@@ -417,7 +419,8 @@ def main(argv):
git_rev, svn_rev = get_git_revision(dep_path, revision)
comment = ('from svn revision %s' % svn_rev) if svn_rev else None
if not git_rev:
print >> sys.stderr, 'Could not find git revision matching %s.' % revision
print('Could not find git revision matching %s.' % revision,
file=sys.stderr)
return 1
return update_deps(deps_file, dep_path, dep_name, git_rev, comment)
......
......@@ -5,6 +5,8 @@
"""Splits a branch into smaller branches and uploads CLs."""
from __future__ import print_function
import collections
import os
import re
......@@ -95,7 +97,7 @@ def UploadCl(refactor_branch, refactor_branch_upstream, directory, files,
# Create a branch.
if not CreateBranchForDirectory(
refactor_branch, directory, refactor_branch_upstream):
print 'Skipping ' + directory + ' for which a branch already exists.'
print('Skipping ' + directory + ' for which a branch already exists.')
return
# Checkout all changes to files in |files|.
......@@ -124,7 +126,7 @@ def UploadCl(refactor_branch, refactor_branch_upstream, directory, files,
upload_args.append('--send-mail')
if enable_auto_submit:
upload_args.append('--enable-auto-submit')
print 'Uploading CL for ' + directory + '.'
print('Uploading CL for ' + directory + '.')
cmd_upload(upload_args)
if comment:
changelist().AddComment(FormatDescriptionOrComment(comment, directory),
......@@ -162,12 +164,12 @@ def PrintClInfo(cl_index, num_cls, directory, file_paths, description,
directory).splitlines()
indented_description = '\n'.join([' ' + l for l in description_lines])
print 'CL {}/{}'.format(cl_index, num_cls)
print 'Path: {}'.format(directory)
print 'Reviewers: {}'.format(', '.join(reviewers))
print '\n' + indented_description + '\n'
print '\n'.join(file_paths)
print
print('CL {}/{}'.format(cl_index, num_cls))
print('Path: {}'.format(directory))
print('Reviewers: {}'.format(', '.join(reviewers)))
print('\n' + indented_description + '\n')
print('\n'.join(file_paths))
print()
def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
......@@ -197,7 +199,7 @@ def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
files = change.AffectedFiles()
if not files:
print 'Cannot split an empty CL.'
print('Cannot split an empty CL.')
return 1
author = git.run('config', 'user.email').strip() or None
......@@ -216,12 +218,12 @@ def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
print('Will split current branch (' + refactor_branch + ') into ' +
str(num_cls) + ' CLs.\n')
if cq_dry_run and num_cls > CL_SPLIT_FORCE_LIMIT:
print (
print(
'This will generate "%r" CLs. This many CLs can potentially generate'
' too much load on the build infrastructure. Please email'
' infra-dev@chromium.org to ensure that this won\'t break anything.'
' The infra team reserves the right to cancel your jobs if they are'
' overloading the CQ.') % num_cls
' overloading the CQ.' % num_cls)
answer = raw_input('Proceed? (y/n):')
if answer.lower() != 'y':
return 0
......
......@@ -2,6 +2,8 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import distutils.version
import os
import sys
......@@ -13,12 +15,12 @@ ROOT_PATH = os.path.abspath(os.path.join(
def native_error(msg, version):
print textwrap.dedent("""\
print(textwrap.dedent("""\
ERROR: Native python-coverage (version: %s) is required to be
installed on your PYTHONPATH to run this test. Recommendation:
sudo apt-get install pip
sudo pip install --upgrade coverage
%s""") % (version, msg)
%s""") % (version, msg))
sys.exit(1)
def covered_main(includes, require_native=None, required_percentage=100.0,
......@@ -56,7 +58,7 @@ def covered_main(includes, require_native=None, required_percentage=100.0,
sys.path.insert(0, os.path.join(ROOT_PATH, 'third_party'))
import coverage
else:
print ("ERROR: python-coverage (%s) is required to be installed on your "
print("ERROR: python-coverage (%s) is required to be installed on your "
"PYTHONPATH to run this test." % require_native)
sys.exit(1)
......@@ -71,7 +73,7 @@ def covered_main(includes, require_native=None, required_percentage=100.0,
COVERAGE.stop()
if COVERAGE.report() < required_percentage:
print 'FATAL: not at required %f%% coverage.' % required_percentage
print('FATAL: not at required %f%% coverage.' % required_percentage)
retcode = 2
return retcode
......@@ -5,6 +5,8 @@
"""Generate fake repositories for testing."""
from __future__ import print_function
import atexit
import datetime
import errno
......@@ -931,7 +933,7 @@ class FakeReposTestBase(trial_dir.TestCase):
result = result[1:]
# The exception trace makes it hard to read so dump it too.
if '\n' in result:
print result
print(result)
self.assertEquals(expected, result, msg)
def check(self, expected, results):
......@@ -978,7 +980,7 @@ class FakeReposTestBase(trial_dir.TestCase):
def main(argv):
fake = FakeRepos()
print 'Using %s' % fake.root_dir
print('Using %s' % fake.root_dir)
try:
fake.set_up_git()
print('Fake setup, press enter to quit or Ctrl-C to keep the checkouts.')
......
......@@ -72,6 +72,8 @@ curl --retry 30 --ssl-reqd -s $url | python <(cat <<EOF
#
# ...and prints the name and md5sum of the corresponding *.war file.
from __future__ import print_function
import json
import re
import sys
......@@ -104,14 +106,14 @@ def _cmp(a, b):
if requested_version:
for info, version in items:
if version == requested_version:
print '"%s" "%s"' % (info['name'], info['md5Hash'])
print('"%s" "%s"' % (info['name'], info['md5Hash']))
sys.exit(0)
print >> sys.stderr, 'No such Gerrit version: %s' % requested_version
print('No such Gerrit version: %s' % requested_version, file=sys.stderr)
sys.exit(1)
items.sort(cmp=_cmp)
for x in items:
print '"%s" "%s"' % (x[0]['name'], x[0]['md5Hash'])
print('"%s" "%s"' % (x[0]['name'], x[0]['md5Hash']))
sys.exit(0)
EOF
) "$version" | xargs | while read name md5; do
......
......@@ -38,6 +38,8 @@ One gotcha: 'repo upload' will always attempt to use the ssh interface to talk
to gerrit.
"""
from __future__ import print_function
import collections
import errno
import netrc
......@@ -363,7 +365,7 @@ class GerritTestCase(unittest.TestCase):
# Announce that gerrit didn't shut down cleanly.
msg = 'Test gerrit server (pid=%d) did not shut down cleanly.' % (
gerrit_instance.gerrit_pid)
print >> sys.stderr, msg
print(msg, file=sys.stderr)
@classmethod
def tearDownClass(cls):
......
......@@ -4,6 +4,8 @@
"""Simplify unit tests based on pymox."""
from __future__ import print_function
import os
import random
import shutil
......@@ -68,7 +70,7 @@ class TestCaseUtils(object):
if actual_members != expected_members:
diff = ([i for i in actual_members if i not in expected_members] +
[i for i in expected_members if i not in actual_members])
print >> sys.stderr, diff
print(diff, file=sys.stderr)
# pylint: disable=no-member
self.assertEqual(actual_members, expected_members)
......
......@@ -2,6 +2,7 @@
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
import atexit
import logging
......@@ -93,5 +94,5 @@ class TestCase(auto_stub.TestCase, TrialDirMixIn):
if '-l' in sys.argv:
# See SHOULD_LEAK definition in TrialDir for its purpose.
TrialDir.SHOULD_LEAK = True
print 'Leaking!'
print('Leaking!')
sys.argv.remove('-l')
......@@ -5,6 +5,8 @@
"""Unit tests for checkout.py."""
from __future__ import print_function
import logging
import os
import shutil
......@@ -275,7 +277,7 @@ class GitBaseTest(BaseTest):
return tree
def _test_prepare(self, co):
print co.prepare(None)
print(co.prepare(None))
class GitCheckout(GitBaseTest):
......
......@@ -6,6 +6,8 @@
"""Unit tests for download_from_google_storage.py."""
from __future__ import print_function
import optparse
import os
import Queue
......@@ -77,11 +79,11 @@ class ChangedWorkingDirectory(object):
def __enter__(self):
self._old_cwd = os.getcwd()
print "Enter directory = ", self._working_directory
print("Enter directory = ", self._working_directory)
os.chdir(self._working_directory)
def __exit__(self, *_):
print "Enter directory = ", self._old_cwd
print("Enter directory = ", self._old_cwd)
os.chdir(self._old_cwd)
......
......@@ -6,6 +6,8 @@
"""Unit tests for fix_encoding.py."""
from __future__ import print_function
import os
import sys
import unittest
......@@ -21,18 +23,18 @@ class FixEncodingTest(unittest.TestCase):
def test_code_page(self):
# Make sure printing garbage won't throw.
print self.text.encode() + '\xff'
print >> sys.stderr, self.text.encode() + '\xff'
print(self.text.encode() + '\xff')
print(self.text.encode() + '\xff', file=sys.stderr)
def test_utf8(self):
# Make sure printing utf-8 works.
print self.text.encode('utf-8')
print >> sys.stderr, self.text.encode('utf-8')
print(self.text.encode('utf-8'))
print(self.text.encode('utf-8'), file=sys.stderr)
def test_unicode(self):
# Make sure printing unicode works.
print self.text
print >> sys.stderr, self.text
print(self.text)
print(self.text, file=sys.stderr)
def test_default_encoding(self):
self.assertEquals('utf-8', sys.getdefaultencoding())
......
......@@ -1192,7 +1192,7 @@ class TestGitCl(TestCase):
short_hostname=short_hostname,
labels=labels)
# Uncomment when debugging.
# print '\n'.join(map(lambda x: '%2i: %s' % x, enumerate(self.calls)))
# print('\n'.join(map(lambda x: '%2i: %s' % x, enumerate(self.calls))))
git_cl.main(['upload'] + upload_args)
def test_gerrit_upload_without_change_id(self):
......
......@@ -5,6 +5,8 @@
"""Unit tests for git_common.py"""
from __future__ import print_function
import binascii
import collections
import datetime
......@@ -807,12 +809,12 @@ class GitFreezeThaw(git_test_utils.GitRepoReadWriteTestBase):
def testAll(self):
def inner():
with open('some/files/file2', 'a') as f2:
print >> f2, 'cool appended line'
print('cool appended line', file=f2)
os.mkdir('some/other_files')
with open('some/other_files/subdir_file', 'w') as f3:
print >> f3, 'new file!'
print('new file!', file=f3)
with open('some/files/file5', 'w') as f5:
print >> f5, 'New file!1!one!'
print('New file!1!one!', file=f5)
STATUS_1 = '\n'.join((
' M some/files/file2',
......@@ -829,7 +831,7 @@ class GitFreezeThaw(git_test_utils.GitRepoReadWriteTestBase):
# Freeze group 2
with open('some/files/file2', 'a') as f2:
print >> f2, 'new! appended line!'
print('new! appended line!', file=f2)
self.assertEquals(self.repo.git('status', '--porcelain').stdout,
' M some/files/file2\n')
self.assertIsNone(self.gc.freeze())
......
......@@ -184,9 +184,10 @@ class PresubmitUnittest(PresubmitTestsBase):
'gerrit_util', 'git_footers', 'glob', 'inspect', 'itertools', 'json',
'load_files', 'logging', 'main', 'marshal', 'multiprocessing',
'normpath', 'optparse', 'os', 'owners', 'owners_finder', 'pickle',
'presubmit_canned_checks', 'random', 're', 'scm', 'sigint_handler',
'signal', 'subprocess', 'sys', 'tempfile', 'threading', 'time',
'traceback', 'types', 'unittest', 'urllib2', 'urlparse', 'warn'
'presubmit_canned_checks', 'print_function', 'random', 're', 'scm',
'sigint_handler', 'signal', 'subprocess', 'sys', 'tempfile',
'threading', 'time', 'traceback', 'types', 'unittest', 'urllib2',
'urlparse', 'warn'
]
# If this test fails, you should add the relevant test.
self.compareMembers(presubmit, members)
......@@ -1670,6 +1671,7 @@ class CannedChecksUnittest(PresubmitTestsBase):
'GetUnitTests', 'GetUnitTestsInDirectory', 'GetUnitTestsRecursively',
'CheckCIPDManifest', 'CheckCIPDPackages', 'CheckCIPDClientDigests',
'CheckChangedLUCIConfigs', 'CheckLucicfgGenOutput',
'print_function',
]
# If this test fails, you should add the relevant test.
self.compareMembers(presubmit_canned_checks, members)
......
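The new 'print_function' entries in the member lists above are needed because a future import binds a corresponding _Feature object in the importing module's namespace, and compareMembers enumerates everything the module exposes. A sketch of the effect:

    from __future__ import print_function

    # The import above binds the name "print_function" in this module,
    # so it shows up alongside ordinary functions and imports.
    print('print_function' in dir())  # True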
......@@ -2553,7 +2553,7 @@ def main():
os.environ['LC_ALL'] = 'C'
RealMain(sys.argv)
except KeyboardInterrupt:
print
print()
StatusUpdate("Interrupted.")
sys.exit(1)
except auth.AuthenticationError as e:
......
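The bare statement above is the degenerate case of the conversion: both spellings emit a single newline. A sketch:

    from __future__ import print_function

    # A bare "print" statement becomes print(), and "print ''" becomes
    # print(''). Each of these writes exactly one '\n' to stdout.
    print()
    print('')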
......@@ -5,6 +5,8 @@
"""Uploads files to Google Storage content addressed."""
from __future__ import print_function
import hashlib
import optparse
import os
......@@ -170,7 +172,7 @@ def upload_to_google_storage(
with open(filename + '.sha1', 'rb') as f:
sha1_file = f.read(1024)
if not re.match('^([a-z0-9]{40})$', sha1_file):
print >> sys.stderr, 'Invalid sha1 hash file %s.sha1' % filename
print('Invalid sha1 hash file %s.sha1' % filename, file=sys.stderr)
return 1
upload_queue.put((filename, sha1_file))
continue
......@@ -191,19 +193,19 @@ def upload_to_google_storage(
printer_thread.join()
# Print timing information.
print 'Hashing %s files took %1f seconds' % (
len(input_filenames), hashing_duration)
print 'Uploading took %1f seconds' % (time.time() - upload_timer)
print('Hashing %s files took %1f seconds' % (
len(input_filenames), hashing_duration))
print('Uploading took %1f seconds' % (time.time() - upload_timer))
# See if we ran into any errors.
max_ret_code = 0
for ret_code, message in ret_codes.queue:
max_ret_code = max(ret_code, max_ret_code)
if message:
print >> sys.stderr, message
print(message, file=sys.stderr)
if not max_ret_code:
print 'Success!'
print('Success!')
return max_ret_code
......
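In the wrapped calls above, only the closing parenthesis moves: the formatted expression is unchanged, and the call's ')' now lands after the argument tuple on the continuation line. A sketch with hypothetical inputs and timing:

    from __future__ import print_function

    input_filenames = ['a.txt', 'b.txt']  # hypothetical inputs
    hashing_duration = 1.5                # hypothetical, in seconds
    print('Hashing %s files took %1f seconds' % (
        len(input_filenames), hashing_duration))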
......@@ -17,6 +17,8 @@ the watchers for files given on the command line. This is useful to verify
changes to WATCHLISTS files.
"""
from __future__ import print_function
import logging
import os
import re
......@@ -126,12 +128,12 @@ class Watchlists(object):
def main(argv):
# Confirm that watchlists can be parsed and spew out the watchers
if len(argv) < 2:
print "Usage (from the base of repo):"
print " %s [file-1] [file-2] ...." % argv[0]
print("Usage (from the base of repo):")
print(" %s [file-1] [file-2] ...." % argv[0])
return 1
wl = Watchlists(os.getcwd())
watchers = wl.GetWatchersForPaths(argv[1:])
print watchers
print(watchers)
if __name__ == '__main__':
......
......@@ -6,6 +6,8 @@
"""Display log of checkins of one particular developer since a particular
date. Only works on git dependencies at the moment."""
from __future__ import print_function
import gclient_utils
import optparse
import os
......@@ -25,8 +27,8 @@ def show_log(path, authors, since='1 week ago'):
stdout=subprocess.PIPE).communicate()[0].rstrip()
if len(status.splitlines()) > 0:
print '---------- %s ----------' % path
print status
print('---------- %s ----------' % path)
print(status)
def main():
......
......@@ -26,6 +26,8 @@ future when a hypothetical VS2015 is released, the 2013 script will be
maintained, and a new 2015 script would be added.
"""
from __future__ import print_function
import hashlib
import json
import optparse
......@@ -52,12 +54,12 @@ elif sys.platform == "cygwin":
try:
import cygwinreg as winreg
except ImportError:
print ''
print 'CygWin does not natively support winreg but a replacement exists.'
print 'https://pypi.python.org/pypi/cygwinreg/'
print ''
print 'Try: easy_install cygwinreg'
print ''
print('')
print('CygWin does not natively support winreg but a replacement exists.')
print('https://pypi.python.org/pypi/cygwinreg/')
print('')
print('Try: easy_install cygwinreg')
print('')
raise
BASEDIR = os.path.dirname(os.path.abspath(__file__))
......@@ -144,25 +146,25 @@ def CalculateHash(root, expected_hash):
timestamps_data_files.append(f[0])
missing_files = [f for f in timestamps_data_files if f not in file_list]
if len(missing_files):
print ('%d files missing from the %s version of the toolchain:' %
print('%d files missing from the %s version of the toolchain:' %
(len(missing_files), expected_hash))
for f in missing_files[:10]:
print '\t%s' % f
print('\t%s' % f)
if len(missing_files) > 10:
print '\t...'
print('\t...')
extra_files = [f for f in file_list if f not in timestamps_data_files]
if len(extra_files):
print ('%d extra files in the %s version of the toolchain:' %
print('%d extra files in the %s version of the toolchain:' %
(len(extra_files), expected_hash))
for f in extra_files[:10]:
print '\t%s' % f
print('\t%s' % f)
if len(extra_files) > 10:
print '\t...'
print('\t...')
if matches:
return timestamps_data['sha1']
# Make long hangs when updating the toolchain less mysterious.
print 'Calculating hash of toolchain in %s. Please wait...' % full_root_path
print('Calculating hash of toolchain in %s. Please wait...' % full_root_path)
sys.stdout.flush()
digest = hashlib.sha1()
for path in file_list:
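The hunks above only drop a space because "print (expr)" was already legal Python 2: a print statement whose single argument happens to be parenthesized. A sketch with hypothetical counts and hash:

    # Valid as a Python 2 print statement and as a Python 3 call;
    # the space before '(' is allowed either way.
    print ('%d files missing from the %s version of the toolchain:' %
           (2, 'deadbeef'))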
......@@ -189,7 +191,7 @@ def CalculateToolchainHashes(root, remove_corrupt_toolchains):
for d in dir_list:
toolchain_hash = CalculateHash(root, d)
if toolchain_hash != d:
print ('The hash of a version of the toolchain has an unexpected value ('
print('The hash of a version of the toolchain has an unexpected value ('
'%s instead of %s)%s.' % (toolchain_hash, d,
', removing it' if remove_corrupt_toolchains else ''))
if remove_corrupt_toolchains:
......@@ -259,23 +261,23 @@ def RequestGsAuthentication():
Googler. This allows much faster downloads, and pulling (old) toolchains
that match src/ revisions.
"""
print 'Access to gs://chrome-wintoolchain/ not configured.'
print '-----------------------------------------------------------------'
print
print 'You appear to be a Googler.'
print
print 'I\'m sorry for the hassle, but you need to do a one-time manual'
print 'authentication. Please run:'
print
print ' download_from_google_storage --config'
print
print 'and follow the instructions.'
print
print 'NOTE 1: Use your google.com credentials, not chromium.org.'
print 'NOTE 2: Enter 0 when asked for a "project-id".'
print
print '-----------------------------------------------------------------'
print
print('Access to gs://chrome-wintoolchain/ not configured.')
print('-----------------------------------------------------------------')
print()
print('You appear to be a Googler.')
print()
print('I\'m sorry for the hassle, but you need to do a one-time manual')
print('authentication. Please run:')
print()
print(' download_from_google_storage --config')
print()
print('and follow the instructions.')
print()
print('NOTE 1: Use your google.com credentials, not chromium.org.')
print('NOTE 2: Enter 0 when asked for a "project-id".')
print()
print('-----------------------------------------------------------------')
print()
sys.stdout.flush()
sys.exit(1)
......@@ -289,7 +291,7 @@ def DelayBeforeRemoving(target_dir):
'\rRemoving old toolchain in %ds... (Ctrl-C to cancel)' % i)
sys.stdout.flush()
time.sleep(1)
print
print()
def DownloadUsingHttp(filename):
......@@ -396,7 +398,7 @@ def RemoveUnusedToolchains(root):
os.remove(full_path)
for d in dirs_to_remove:
print ('Removing %s as it doesn\'t correspond to any known toolchain.' %
print('Removing %s as it doesn\'t correspond to any known toolchain.' %
os.path.join(root, d))
# Use the RemoveToolchain function to remove these directories as they might
# contain an older version of the toolchain.
......@@ -408,7 +410,7 @@ def RemoveUnusedToolchains(root):
for toolchain in valid_toolchains:
toolchain_age_in_sec = time.time() - toolchain[0]
if toolchain_age_in_sec > toolchain_expiration_time:
print ('Removing version %s of the Win toolchain as it hasn\'t been used'
print('Removing version %s of the Win toolchain as it hasn\'t been used'
' in the past %d days.' % (toolchain[1],
toolchain_age_in_sec / 60 / 60 / 24))
RemoveToolchain(root, toolchain[1], True)
......@@ -569,10 +571,10 @@ def main():
if got_new_toolchain:
current_hashes = CalculateToolchainHashes(target_dir, False)
if desired_hash not in current_hashes:
print >> sys.stderr, (
print(
'Got wrong hash after pulling a new toolchain. '
'Wanted \'%s\', got one of \'%s\'.' % (
desired_hash, ', '.join(current_hashes)))
desired_hash, ', '.join(current_hashes)), file=sys.stderr)
return 1
SaveTimestampsAndHash(target_dir, desired_hash)
......
......@@ -30,6 +30,8 @@ useful as the resulting zip can't be redistributed, and most will presumably
have a Pro license anyway).
"""
from __future__ import print_function
import collections
import glob
import json
......@@ -409,14 +411,14 @@ def AddEnvSetup(files):
'win_sdk\\bin\\SetEnv.arm64.json'))
vs_version_file = os.path.join(tempdir, 'VS_VERSION')
with open(vs_version_file, 'wb') as version:
print >>version, VS_VERSION
print(VS_VERSION, file=version)
files.append((vs_version_file, 'VS_VERSION'))
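Printing into a file opened 'wb' as above still works under Python 2, where str and bytes are the same type. A sketch with a hypothetical version string:

    from __future__ import print_function

    VS_VERSION = '2017'  # hypothetical value
    # On Python 2.7 this writes '2017\n'; binary mode makes no difference
    # there because str is bytes. (Python 3 would need a text-mode handle.)
    with open('VS_VERSION', 'wb') as version:
        print(VS_VERSION, file=version)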
def RenameToSha1(output):
"""Determine the hash in the same way that the unzipper does to rename the
# .zip file."""
print 'Extracting to determine hash...'
print('Extracting to determine hash...')
tempdir = tempfile.mkdtemp()
old_dir = os.getcwd()
os.chdir(tempdir)
......@@ -424,13 +426,13 @@ def RenameToSha1(output):
with zipfile.ZipFile(
os.path.join(old_dir, output), 'r', zipfile.ZIP_DEFLATED, True) as zf:
zf.extractall(rel_dir)
print 'Hashing...'
print('Hashing...')
sha1 = get_toolchain_if_necessary.CalculateHash(rel_dir, None)
os.chdir(old_dir)
shutil.rmtree(tempdir)
final_name = sha1 + '.zip'
os.rename(output, final_name)
print 'Renamed %s to %s.' % (output, final_name)
print('Renamed %s to %s.' % (output, final_name))
def main():
......@@ -454,7 +456,7 @@ def main():
files = BuildRepackageFileList(options.repackage_dir)
else:
if len(args) != 1 or args[0] not in ('2015', '2017'):
print 'Must specify 2015 or 2017'
print('Must specify 2015 or 2017')
parser.print_help();
return 1
......@@ -462,7 +464,7 @@ def main():
if (not os.path.exists(os.path.join(options.override_dir, 'bin')) or
not os.path.exists(os.path.join(options.override_dir, 'include')) or
not os.path.exists(os.path.join(options.override_dir, 'lib'))):
print 'Invalid override directory - must contain bin/include/lib dirs'
print('Invalid override directory - must contain bin/include/lib dirs')
return 1
global VS_VERSION
......@@ -478,14 +480,14 @@ def main():
else:
VC_TOOLS = 'VC'
print 'Building file list for VS %s Windows %s...' % (VS_VERSION, WIN_VERSION)
print('Building file list for VS %s Windows %s...' % (VS_VERSION, WIN_VERSION))
files = BuildFileList(options.override_dir)
AddEnvSetup(files)
if False:
for f in files:
print f[0], '->', f[1]
print(f[0], '->', f[1])
return 0
output = 'out.zip'
......
......@@ -5,6 +5,8 @@
"""Display active git branches and code changes in a chromiumos workspace."""
from __future__ import print_function
import gclient_utils
import os
import re
......@@ -38,7 +40,7 @@ def show_dir(full_name, relative_name, color):
if lines_printed == 0:
show_name()
lines_printed += 1
print branch
print(branch)
status = subprocess.Popen(['git', 'status'],
cwd=full_name,
......@@ -48,8 +50,8 @@ def show_dir(full_name, relative_name, color):
if lines_printed == 0:
show_name()
if lines_printed == 1:
print '---------------'
print status
print('---------------')
print(status)
def main():
......