Commit b946b238 authored by Raul Tambre, committed by Commit Bot

Improve gclient Python 3 compatibility

This enables gclient sync and gclient runhooks to run, barring hook script failures.
git cl upload also now works.

The scripts still work with Python 2.
There are no intended behaviour changes.

Bug: 942522
Change-Id: I2ac587b5f803ba7f5bb5e412337ce049f4b1a741
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/1524583
Commit-Queue: Raul Tambre <raul@tambre.ee>
Reviewed-by: Dirk Pranke <dpranke@chromium.org>
parent b45f6428
...@@ -5,6 +5,8 @@ ...@@ -5,6 +5,8 @@
"""Outputs host CPU architecture in format recognized by gyp.""" """Outputs host CPU architecture in format recognized by gyp."""
from __future__ import print_function
import platform import platform
import re import re
import sys import sys
...@@ -51,4 +53,4 @@ def DoMain(_): ...@@ -51,4 +53,4 @@ def DoMain(_):
return HostArch() return HostArch()
if __name__ == '__main__': if __name__ == '__main__':
print DoMain([]) print(DoMain([]))
...@@ -5,11 +5,17 @@ ...@@ -5,11 +5,17 @@
"""Download files from Google Storage based on SHA1 sums.""" """Download files from Google Storage based on SHA1 sums."""
from __future__ import print_function
import hashlib import hashlib
import optparse import optparse
import os import os
import Queue
try:
import Queue as queue
except ImportError: # For Py3 compatibility
import queue
import re import re
import shutil import shutil
import stat import stat
...@@ -110,13 +116,13 @@ class Gsutil(object): ...@@ -110,13 +116,13 @@ class Gsutil(object):
timeout=self.timeout) timeout=self.timeout)
# Parse output. # Parse output.
status_code_match = re.search('status=([0-9]+)', err) status_code_match = re.search(b'status=([0-9]+)', err)
if status_code_match: if status_code_match:
return (int(status_code_match.group(1)), out, err) return (int(status_code_match.group(1)), out, err)
if ('You are attempting to access protected data with ' if (b'You are attempting to access protected data with '
'no configured credentials.' in err): b'no configured credentials.' in err):
return (403, out, err) return (403, out, err)
if 'matched no objects' in err: if b'matched no objects' in err:
return (404, out, err) return (404, out, err)
return (code, out, err) return (code, out, err)
...@@ -164,15 +170,15 @@ def enumerate_input(input_filename, directory, recursive, ignore_errors, output, ...@@ -164,15 +170,15 @@ def enumerate_input(input_filename, directory, recursive, ignore_errors, output,
if not os.path.exists(input_filename): if not os.path.exists(input_filename):
if not ignore_errors: if not ignore_errors:
raise FileNotFoundError('%s not found.' % input_filename) raise FileNotFoundError('%s not found.' % input_filename)
print >> sys.stderr, '%s not found.' % input_filename print('%s not found.' % input_filename, file=sys.stderr)
with open(input_filename, 'rb') as f: with open(input_filename, 'rb') as f:
sha1_match = re.match('^([A-Za-z0-9]{40})$', f.read(1024).rstrip()) sha1_match = re.match(b'^([A-Za-z0-9]{40})$', f.read(1024).rstrip())
if sha1_match: if sha1_match:
yield (sha1_match.groups(1)[0], output) yield (sha1_match.groups(1)[0], output)
return return
if not ignore_errors: if not ignore_errors:
raise InvalidFileError('No sha1 sum found in %s.' % input_filename) raise InvalidFileError('No sha1 sum found in %s.' % input_filename)
print >> sys.stderr, 'No sha1 sum found in %s.' % input_filename print('No sha1 sum found in %s.' % input_filename, file=sys.stderr)
return return
if not directory: if not directory:
...@@ -198,20 +204,20 @@ def enumerate_input(input_filename, directory, recursive, ignore_errors, output, ...@@ -198,20 +204,20 @@ def enumerate_input(input_filename, directory, recursive, ignore_errors, output,
'the path of %s' % full_path) 'the path of %s' % full_path)
if not ignore_errors: if not ignore_errors:
raise InvalidFileError(err) raise InvalidFileError(err)
print >> sys.stderr, err print(err, file=sys.stderr)
continue continue
current_platform = PLATFORM_MAPPING[sys.platform] current_platform = PLATFORM_MAPPING[sys.platform]
if current_platform != target_platform: if current_platform != target_platform:
continue continue
with open(full_path, 'rb') as f: with open(full_path, 'rb') as f:
sha1_match = re.match('^([A-Za-z0-9]{40})$', f.read(1024).rstrip()) sha1_match = re.match(b'^([A-Za-z0-9]{40})$', f.read(1024).rstrip())
if sha1_match: if sha1_match:
yield (sha1_match.groups(1)[0], full_path.replace('.sha1', '')) yield (sha1_match.groups(1)[0], full_path.replace('.sha1', ''))
else: else:
if not ignore_errors: if not ignore_errors:
raise InvalidFileError('No sha1 sum found in %s.' % filename) raise InvalidFileError('No sha1 sum found in %s.' % filename)
print >> sys.stderr, 'No sha1 sum found in %s.' % filename print('No sha1 sum found in %s.' % filename, file=sys.stderr)
def _validate_tar_file(tar, prefix): def _validate_tar_file(tar, prefix):
...@@ -246,7 +252,7 @@ def _downloader_worker_thread(thread_num, q, force, base_url, ...@@ -246,7 +252,7 @@ def _downloader_worker_thread(thread_num, q, force, base_url,
if get_sha1(output_filename) == input_sha1_sum: if get_sha1(output_filename) == input_sha1_sum:
continue continue
# Check if file exists. # Check if file exists.
file_url = '%s/%s' % (base_url, input_sha1_sum) file_url = '%s/%s' % (base_url, input_sha1_sum.decode())
(code, _, err) = gsutil.check_call('ls', file_url) (code, _, err) = gsutil.check_call('ls', file_url)
if code != 0: if code != 0:
if code == 404: if code == 404:
...@@ -256,10 +262,10 @@ def _downloader_worker_thread(thread_num, q, force, base_url, ...@@ -256,10 +262,10 @@ def _downloader_worker_thread(thread_num, q, force, base_url,
file_url, output_filename))) file_url, output_filename)))
else: else:
# Other error, probably auth related (bad ~/.boto, etc). # Other error, probably auth related (bad ~/.boto, etc).
out_q.put('%d> Failed to fetch file %s for %s, skipping. [Err: %s]' % ( out_q.put('%d> Failed to fetch file %s for %s, skipping. [Err: %s]' %
thread_num, file_url, output_filename, err)) (thread_num, file_url, output_filename, err.decode()))
ret_codes.put((1, 'Failed to fetch file %s for %s. [Err: %s]' % ( ret_codes.put((1, 'Failed to fetch file %s for %s. [Err: %s]' %
file_url, output_filename, err))) (file_url, output_filename, err.decode())))
continue continue
# Fetch the file. # Fetch the file.
out_q.put('%d> Downloading %s...' % (thread_num, output_filename)) out_q.put('%d> Downloading %s...' % (thread_num, output_filename))
...@@ -272,8 +278,8 @@ def _downloader_worker_thread(thread_num, q, force, base_url, ...@@ -272,8 +278,8 @@ def _downloader_worker_thread(thread_num, q, force, base_url,
thread_num, output_filename)) thread_num, output_filename))
code, _, err = gsutil.check_call('cp', file_url, output_filename) code, _, err = gsutil.check_call('cp', file_url, output_filename)
if code != 0: if code != 0:
out_q.put('%d> %s' % (thread_num, err)) out_q.put('%d> %s' % (thread_num, err.decode()))
ret_codes.put((code, err)) ret_codes.put((code, err.decode()))
continue continue
remote_sha1 = get_sha1(output_filename) remote_sha1 = get_sha1(output_filename)
...@@ -328,8 +334,8 @@ def _downloader_worker_thread(thread_num, q, force, base_url, ...@@ -328,8 +334,8 @@ def _downloader_worker_thread(thread_num, q, force, base_url,
# "x-goog-meta-executable". # "x-goog-meta-executable".
code, out, _ = gsutil.check_call('stat', file_url) code, out, _ = gsutil.check_call('stat', file_url)
if code != 0: if code != 0:
out_q.put('%d> %s' % (thread_num, err)) out_q.put('%d> %s' % (thread_num, err.decode()))
ret_codes.put((code, err)) ret_codes.put((code, err.decode()))
elif re.search(r'executable:\s*1', out): elif re.search(r'executable:\s*1', out):
st = os.stat(output_filename) st = os.stat(output_filename)
os.chmod(output_filename, st.st_mode | stat.S_IEXEC) os.chmod(output_filename, st.st_mode | stat.S_IEXEC)
...@@ -348,7 +354,7 @@ class PrinterThread(threading.Thread): ...@@ -348,7 +354,7 @@ class PrinterThread(threading.Thread):
if line is None: if line is None:
break break
self.did_print_anything = True self.did_print_anything = True
print line print(line)
def _data_exists(input_sha1_sum, output_filename, extract): def _data_exists(input_sha1_sum, output_filename, extract):
...@@ -405,9 +411,9 @@ def download_from_google_storage( ...@@ -405,9 +411,9 @@ def download_from_google_storage(
# Start up all the worker threads. # Start up all the worker threads.
all_threads = [] all_threads = []
download_start = time.time() download_start = time.time()
stdout_queue = Queue.Queue() stdout_queue = queue.Queue()
work_queue = Queue.Queue() work_queue = queue.Queue()
ret_codes = Queue.Queue() ret_codes = queue.Queue()
ret_codes.put((0, None)) ret_codes.put((0, None))
for thread_num in range(num_threads): for thread_num in range(num_threads):
t = threading.Thread( t = threading.Thread(
...@@ -438,12 +444,12 @@ def download_from_google_storage( ...@@ -438,12 +444,12 @@ def download_from_google_storage(
for ret_code, message in ret_codes.queue: for ret_code, message in ret_codes.queue:
max_ret_code = max(ret_code, max_ret_code) max_ret_code = max(ret_code, max_ret_code)
if message: if message:
print >> sys.stderr, message print(message, file=sys.stderr)
# Only print summary if any work was done. # Only print summary if any work was done.
if printer_thread.did_print_anything: if printer_thread.did_print_anything:
print 'Downloading %d files took %1f second(s)' % ( print('Downloading %d files took %1f second(s)' %
len(input_data), time.time() - download_start) (len(input_data), time.time() - download_start))
return max_ret_code return max_ret_code
...@@ -530,14 +536,16 @@ def main(args): ...@@ -530,14 +536,16 @@ def main(args):
if (set(('http_proxy', 'https_proxy')).intersection( if (set(('http_proxy', 'https_proxy')).intersection(
env.lower() for env in os.environ) and env.lower() for env in os.environ) and
'NO_AUTH_BOTO_CONFIG' not in os.environ): 'NO_AUTH_BOTO_CONFIG' not in os.environ):
print >> sys.stderr, ('NOTICE: You have PROXY values set in your ' print('NOTICE: You have PROXY values set in your environment, but gsutil'
'environment, but gsutil in depot_tools does not ' 'in depot_tools does not (yet) obey them.',
'(yet) obey them.') file=sys.stderr)
print >> sys.stderr, ('Also, --no_auth prevents the normal BOTO_CONFIG ' print('Also, --no_auth prevents the normal BOTO_CONFIG environment'
'environment variable from being used.') 'variable from being used.',
print >> sys.stderr, ('To use a proxy in this situation, please supply ' file=sys.stderr)
'those settings in a .boto file pointed to by ' print('To use a proxy in this situation, please supply those settings'
'the NO_AUTH_BOTO_CONFIG environment var.') 'in a .boto file pointed to by the NO_AUTH_BOTO_CONFIG environment'
'variable.',
file=sys.stderr)
options.boto = os.environ.get('NO_AUTH_BOTO_CONFIG', os.devnull) options.boto = os.environ.get('NO_AUTH_BOTO_CONFIG', os.devnull)
# Make sure gsutil exists where we expect it to. # Make sure gsutil exists where we expect it to.
...@@ -550,10 +558,10 @@ def main(args): ...@@ -550,10 +558,10 @@ def main(args):
# Passing in -g/--config will run our copy of GSUtil, then quit. # Passing in -g/--config will run our copy of GSUtil, then quit.
if options.config: if options.config:
print '===Note from depot_tools===' print('===Note from depot_tools===')
print 'If you do not have a project ID, enter "0" when asked for one.' print('If you do not have a project ID, enter "0" when asked for one.')
print '===End note from depot_tools===' print('===End note from depot_tools===')
print print()
gsutil.check_call('version') gsutil.check_call('version')
return gsutil.call('config') return gsutil.call('config')
......
...@@ -95,8 +95,8 @@ def fix_win_sys_argv(encoding): ...@@ -95,8 +95,8 @@ def fix_win_sys_argv(encoding):
argc = c_int(0) argc = c_int(0)
argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc)) argv_unicode = CommandLineToArgvW(GetCommandLineW(), byref(argc))
argv = [ argv = [
argv_unicode[i].encode(encoding, 'replace') argv_unicode[i].encode(encoding, 'replace') for i in range(0, argc.value)
for i in xrange(0, argc.value)] ]
if not hasattr(sys, 'frozen'): if not hasattr(sys, 'frozen'):
# If this is an executable produced by py2exe or bbfreeze, then it # If this is an executable produced by py2exe or bbfreeze, then it
...@@ -107,7 +107,7 @@ def fix_win_sys_argv(encoding): ...@@ -107,7 +107,7 @@ def fix_win_sys_argv(encoding):
# Also skip option arguments to the Python interpreter. # Also skip option arguments to the Python interpreter.
while len(argv) > 0: while len(argv) > 0:
arg = argv[0] arg = argv[0]
if not arg.startswith(u'-') or arg == u'-': if not arg.startswith(b'-') or arg == b'-':
break break
argv = argv[1:] argv = argv[1:]
if arg == u'-m': if arg == u'-m':
......
...@@ -96,7 +96,11 @@ import pprint ...@@ -96,7 +96,11 @@ import pprint
import re import re
import sys import sys
import time import time
import urlparse
try:
import urlparse
except ImportError: # For Py3 compatibility
import urllib.parse as urlparse
import detect_host_arch import detect_host_arch
import fix_encoding import fix_encoding
...@@ -128,14 +132,14 @@ def ToGNString(value, allow_dicts = True): ...@@ -128,14 +132,14 @@ def ToGNString(value, allow_dicts = True):
allow_dicts indicates if this function will allow converting dictionaries allow_dicts indicates if this function will allow converting dictionaries
to GN scopes. This is only possible at the top level, you can't nest a to GN scopes. This is only possible at the top level, you can't nest a
GN scope in a list, so this should be set to False for recursive calls.""" GN scope in a list, so this should be set to False for recursive calls."""
if isinstance(value, basestring): if isinstance(value, str):
if value.find('\n') >= 0: if value.find('\n') >= 0:
raise GNException("Trying to print a string with a newline in it.") raise GNException("Trying to print a string with a newline in it.")
return '"' + \ return '"' + \
value.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$') + \ value.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$') + \
'"' '"'
if isinstance(value, unicode): if sys.version_info.major == 2 and isinstance(value, unicode):
return ToGNString(value.encode('utf-8')) return ToGNString(value.encode('utf-8'))
if isinstance(value, bool): if isinstance(value, bool):
...@@ -286,7 +290,7 @@ class DependencySettings(object): ...@@ -286,7 +290,7 @@ class DependencySettings(object):
self._custom_hooks = custom_hooks or [] self._custom_hooks = custom_hooks or []
# Post process the url to remove trailing slashes. # Post process the url to remove trailing slashes.
if isinstance(self.url, basestring): if isinstance(self.url, str):
# urls are sometime incorrectly written as proto://host/path/@rev. Replace # urls are sometime incorrectly written as proto://host/path/@rev. Replace
# it to proto://host/path@rev. # it to proto://host/path@rev.
self.set_url(self.url.replace('/@', '@')) self.set_url(self.url.replace('/@', '@'))
...@@ -428,7 +432,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): ...@@ -428,7 +432,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
self._OverrideUrl() self._OverrideUrl()
# This is inherited from WorkItem. We want the URL to be a resource. # This is inherited from WorkItem. We want the URL to be a resource.
if self.url and isinstance(self.url, basestring): if self.url and isinstance(self.url, str):
# The url is usually given to gclient either as https://blah@123 # The url is usually given to gclient either as https://blah@123
# or just https://blah. The @123 portion is irrelevant. # or just https://blah. The @123 portion is irrelevant.
self.resources.append(self.url.split('@')[0]) self.resources.append(self.url.split('@')[0])
...@@ -448,7 +452,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): ...@@ -448,7 +452,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
self.url, parsed_url) self.url, parsed_url)
self.set_url(parsed_url) self.set_url(parsed_url)
elif isinstance(self.url, basestring): elif isinstance(self.url, str):
parsed_url = urlparse.urlparse(self.url) parsed_url = urlparse.urlparse(self.url)
if (not parsed_url[0] and if (not parsed_url[0] and
not re.match(r'^\w+\@[\w\.-]+\:[\w\/]+', parsed_url[2])): not re.match(r'^\w+\@[\w\.-]+\:[\w\/]+', parsed_url[2])):
...@@ -572,7 +576,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): ...@@ -572,7 +576,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
# If a line is in custom_deps, but not in the solution, we want to append # If a line is in custom_deps, but not in the solution, we want to append
# this line to the solution. # this line to the solution.
for dep_name, dep_info in self.custom_deps.iteritems(): for dep_name, dep_info in self.custom_deps.items():
if dep_name not in deps: if dep_name not in deps:
deps[dep_name] = {'url': dep_info, 'dep_type': 'git'} deps[dep_name] = {'url': dep_info, 'dep_type': 'git'}
...@@ -601,7 +605,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): ...@@ -601,7 +605,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
def _deps_to_objects(self, deps, use_relative_paths): def _deps_to_objects(self, deps, use_relative_paths):
"""Convert a deps dict to a dict of Dependency objects.""" """Convert a deps dict to a dict of Dependency objects."""
deps_to_add = [] deps_to_add = []
for name, dep_value in deps.iteritems(): for name, dep_value in deps.items():
should_process = self.should_process should_process = self.should_process
if dep_value is None: if dep_value is None:
continue continue
...@@ -709,7 +713,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): ...@@ -709,7 +713,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
self._vars = local_scope.get('vars', {}) self._vars = local_scope.get('vars', {})
if self.parent: if self.parent:
for key, value in self.parent.get_vars().iteritems(): for key, value in self.parent.get_vars().items():
if key in self._vars: if key in self._vars:
self._vars[key] = value self._vars[key] = value
# Since we heavily post-process things, freeze ones which should # Since we heavily post-process things, freeze ones which should
...@@ -737,7 +741,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): ...@@ -737,7 +741,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
if 'recursedeps' in local_scope: if 'recursedeps' in local_scope:
for ent in local_scope['recursedeps']: for ent in local_scope['recursedeps']:
if isinstance(ent, basestring): if isinstance(ent, str):
self.recursedeps[ent] = self.deps_file self.recursedeps[ent] = self.deps_file
else: # (depname, depsfilename) else: # (depname, depsfilename)
self.recursedeps[ent[0]] = ent[1] self.recursedeps[ent[0]] = ent[1]
...@@ -746,7 +750,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): ...@@ -746,7 +750,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
if rel_prefix: if rel_prefix:
logging.warning('Updating recursedeps by prepending %s.', rel_prefix) logging.warning('Updating recursedeps by prepending %s.', rel_prefix)
rel_deps = {} rel_deps = {}
for depname, options in self.recursedeps.iteritems(): for depname, options in self.recursedeps.items():
rel_deps[ rel_deps[
os.path.normpath(os.path.join(rel_prefix, depname))] = options os.path.normpath(os.path.join(rel_prefix, depname))] = options
self.recursedeps = rel_deps self.recursedeps = rel_deps
...@@ -1004,7 +1008,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings): ...@@ -1004,7 +1008,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
variables = self.get_vars() variables = self.get_vars()
for arg in self._gn_args: for arg in self._gn_args:
value = variables[arg] value = variables[arg]
if isinstance(value, basestring): if isinstance(value, str):
value = gclient_eval.EvaluateCondition(value, variables) value = gclient_eval.EvaluateCondition(value, variables)
lines.append('%s = %s' % (arg, ToGNString(value))) lines.append('%s = %s' % (arg, ToGNString(value)))
with open(os.path.join(self.root.root_dir, self._gn_args_file), 'w') as f: with open(os.path.join(self.root.root_dir, self._gn_args_file), 'w') as f:
...@@ -1353,7 +1357,8 @@ solutions = %(solution_list)s ...@@ -1353,7 +1357,8 @@ solutions = %(solution_list)s
mirror.exists()) mirror.exists())
else: else:
mirror_string = 'not used' mirror_string = 'not used'
raise gclient_utils.Error(''' raise gclient_utils.Error(
'''
Your .gclient file seems to be broken. The requested URL is different from what Your .gclient file seems to be broken. The requested URL is different from what
is actually checked out in %(checkout_path)s. is actually checked out in %(checkout_path)s.
...@@ -1581,7 +1586,7 @@ it or fix the checkout. ...@@ -1581,7 +1586,7 @@ it or fix the checkout.
full_entries = [os.path.join(self.root_dir, e.replace('/', os.path.sep)) full_entries = [os.path.join(self.root_dir, e.replace('/', os.path.sep))
for e in entries] for e in entries]
for entry, prev_url in self._ReadEntries().iteritems(): for entry, prev_url in self._ReadEntries().items():
if not prev_url: if not prev_url:
# entry must have been overridden via .gclient custom_deps # entry must have been overridden via .gclient custom_deps
continue continue
......
This diff is collapsed.
...@@ -7,6 +7,9 @@ ...@@ -7,6 +7,9 @@
# code, and even more importantly don't add more toplevel import statements, # code, and even more importantly don't add more toplevel import statements,
# particularly for modules that are not builtin (see sys.builtin_modules_names, # particularly for modules that are not builtin (see sys.builtin_modules_names,
# os isn't built in, but it's essential to this file). # os isn't built in, but it's essential to this file).
from __future__ import print_function
import os import os
import sys import sys
...@@ -30,16 +33,17 @@ def FindGclientRoot(from_dir, filename='.gclient'): ...@@ -30,16 +33,17 @@ def FindGclientRoot(from_dir, filename='.gclient'):
# might have failed. In that case, we cannot verify that the .gclient # might have failed. In that case, we cannot verify that the .gclient
# is the one we want to use. In order to not to cause too much trouble, # is the one we want to use. In order to not to cause too much trouble,
# just issue a warning and return the path anyway. # just issue a warning and return the path anyway.
print >> sys.stderr, ("%s missing, %s file in parent directory %s might " print(
"not be the file you want to use." % "%s missing, %s file in parent directory %s might not be the file "
(entries_filename, filename, path)) "you want to use." % (entries_filename, filename, path),
file=sys.stderr)
return path return path
scope = {} scope = {}
try: try:
import io import io
with io.open(entries_filename, encoding='utf-8') as f: with io.open(entries_filename, encoding='utf-8') as f:
exec(f.read(), scope) exec(f.read(), scope)
except SyntaxError, e: except SyntaxError as e:
SyntaxErrorToError(filename, e) SyntaxErrorToError(filename, e)
all_directories = scope['entries'].keys() all_directories = scope['entries'].keys()
path_to_check = real_from_dir[len(path)+1:] path_to_check = real_from_dir[len(path)+1:]
...@@ -113,7 +117,7 @@ def GetBuildtoolsPlatformBinaryPath(): ...@@ -113,7 +117,7 @@ def GetBuildtoolsPlatformBinaryPath():
elif sys.platform == 'darwin': elif sys.platform == 'darwin':
subdir = 'mac' subdir = 'mac'
elif sys.platform.startswith('linux'): elif sys.platform.startswith('linux'):
subdir = 'linux64' subdir = 'linux64'
else: else:
raise Error('Unknown platform: ' + sys.platform) raise Error('Unknown platform: ' + sys.platform)
return os.path.join(buildtools_path, subdir) return os.path.join(buildtools_path, subdir)
...@@ -130,7 +134,8 @@ def GetGClientPrimarySolutionName(gclient_root_dir_path): ...@@ -130,7 +134,8 @@ def GetGClientPrimarySolutionName(gclient_root_dir_path):
"""Returns the name of the primary solution in the .gclient file specified.""" """Returns the name of the primary solution in the .gclient file specified."""
gclient_config_file = os.path.join(gclient_root_dir_path, '.gclient') gclient_config_file = os.path.join(gclient_root_dir_path, '.gclient')
env = {} env = {}
execfile(gclient_config_file, env) exec(compile(open(gclient_config_file).read(), gclient_config_file, 'exec'),
env)
solutions = env.get('solutions', []) solutions = env.get('solutions', [])
if solutions: if solutions:
return solutions[0].get('name') return solutions[0].get('name')
......
...@@ -18,7 +18,11 @@ import sys ...@@ -18,7 +18,11 @@ import sys
import tempfile import tempfile
import threading import threading
import traceback import traceback
import urlparse
try:
import urlparse
except ImportError: # For Py3 compatibility
import urllib.parse as urlparse
import download_from_google_storage import download_from_google_storage
import gclient_utils import gclient_utils
...@@ -311,7 +315,8 @@ class GitWrapper(SCMWrapper): ...@@ -311,7 +315,8 @@ class GitWrapper(SCMWrapper):
if file_list is not None: if file_list is not None:
files = self._Capture( files = self._Capture(
['-c', 'core.quotePath=false', 'ls-files']).splitlines() ['-c', 'core.quotePath=false', 'ls-files']).splitlines()
file_list.extend([os.path.join(self.checkout_path, f) for f in files]) file_list.extend(
[os.path.join(self.checkout_path, f.decode()) for f in files])
def _DisableHooks(self): def _DisableHooks(self):
hook_dir = os.path.join(self.checkout_path, '.git', 'hooks') hook_dir = os.path.join(self.checkout_path, '.git', 'hooks')
...@@ -590,10 +595,10 @@ class GitWrapper(SCMWrapper): ...@@ -590,10 +595,10 @@ class GitWrapper(SCMWrapper):
# Skip url auto-correction if remote.origin.gclient-auto-fix-url is set. # Skip url auto-correction if remote.origin.gclient-auto-fix-url is set.
# This allows devs to use experimental repos which have a different url # This allows devs to use experimental repos which have a different url
# but whose branch(s) are the same as official repos. # but whose branch(s) are the same as official repos.
if (current_url.rstrip('/') != url.rstrip('/') and if (current_url.rstrip(b'/') != url.rstrip('/') and url != 'git://foo' and
url != 'git://foo' and
subprocess2.capture( subprocess2.capture(
['git', 'config', 'remote.%s.gclient-auto-fix-url' % self.remote], ['git', 'config',
'remote.%s.gclient-auto-fix-url' % self.remote],
cwd=self.checkout_path).strip() != 'False'): cwd=self.checkout_path).strip() != 'False'):
self.Print('_____ switching %s to a new upstream' % self.relpath) self.Print('_____ switching %s to a new upstream' % self.relpath)
if not (options.force or options.reset): if not (options.force or options.reset):
...@@ -1117,7 +1122,7 @@ class GitWrapper(SCMWrapper): ...@@ -1117,7 +1122,7 @@ class GitWrapper(SCMWrapper):
try: try:
rebase_output = scm.GIT.Capture(rebase_cmd, cwd=self.checkout_path) rebase_output = scm.GIT.Capture(rebase_cmd, cwd=self.checkout_path)
except subprocess2.CalledProcessError, e: except subprocess2.CalledProcessError as e:
if (re.match(r'cannot rebase: you have unstaged changes', e.stderr) or if (re.match(r'cannot rebase: you have unstaged changes', e.stderr) or
re.match(r'cannot rebase: your index contains uncommitted changes', re.match(r'cannot rebase: your index contains uncommitted changes',
e.stderr)): e.stderr)):
...@@ -1456,9 +1461,9 @@ class CipdRoot(object): ...@@ -1456,9 +1461,9 @@ class CipdRoot(object):
try: try:
ensure_file = None ensure_file = None
with tempfile.NamedTemporaryFile( with tempfile.NamedTemporaryFile(
suffix='.ensure', delete=False) as ensure_file: suffix='.ensure', delete=False, mode='w') as ensure_file:
ensure_file.write('$ParanoidMode CheckPresence\n\n') ensure_file.write('$ParanoidMode CheckPresence\n\n')
for subdir, packages in sorted(self._packages_by_subdir.iteritems()): for subdir, packages in sorted(self._packages_by_subdir.items()):
ensure_file.write('@Subdir %s\n' % subdir) ensure_file.write('@Subdir %s\n' % subdir)
for package in sorted(packages, key=lambda p: p.name): for package in sorted(packages, key=lambda p: p.name):
ensure_file.write('%s %s\n' % (package.name, package.version)) ensure_file.write('%s %s\n' % (package.name, package.version))
......
This diff is collapsed.
...@@ -6,6 +6,7 @@ ...@@ -6,6 +6,7 @@
"""A git command for managing a local cache of git repositories.""" """A git command for managing a local cache of git repositories."""
from __future__ import print_function from __future__ import print_function
import contextlib import contextlib
import errno import errno
import logging import logging
...@@ -17,7 +18,12 @@ import threading ...@@ -17,7 +18,12 @@ import threading
import time import time
import subprocess import subprocess
import sys import sys
import urlparse
try:
import urlparse
except ImportError: # For Py3 compatibility
import urllib.parse as urlparse
import zipfile import zipfile
from download_from_google_storage import Gsutil from download_from_google_storage import Gsutil
......
...@@ -4443,7 +4443,7 @@ def GenerateGerritChangeId(message): ...@@ -4443,7 +4443,7 @@ def GenerateGerritChangeId(message):
# entropy. # entropy.
lines.append(message) lines.append(message)
change_hash = RunCommand(['git', 'hash-object', '-t', 'commit', '--stdin'], change_hash = RunCommand(['git', 'hash-object', '-t', 'commit', '--stdin'],
stdin='\n'.join(lines)) stdin=('\n'.join(lines)).encode())
return 'I%s' % change_hash.strip() return 'I%s' % change_hash.strip()
......
...@@ -17,7 +17,12 @@ import subprocess ...@@ -17,7 +17,12 @@ import subprocess
import sys import sys
import tempfile import tempfile
import time import time
import urllib2
try:
import urllib2 as urllib
except ImportError: # For Py3 compatibility
import urllib.request as urllib
import zipfile import zipfile
...@@ -53,7 +58,7 @@ def download_gsutil(version, target_dir): ...@@ -53,7 +58,7 @@ def download_gsutil(version, target_dir):
local_md5 = md5_calc.hexdigest() local_md5 = md5_calc.hexdigest()
metadata_url = '%s%s' % (API_URL, filename) metadata_url = '%s%s' % (API_URL, filename)
metadata = json.load(urllib2.urlopen(metadata_url)) metadata = json.load(urllib.urlopen(metadata_url))
remote_md5 = base64.b64decode(metadata['md5Hash']) remote_md5 = base64.b64decode(metadata['md5Hash'])
if local_md5 == remote_md5: if local_md5 == remote_md5:
...@@ -62,7 +67,7 @@ def download_gsutil(version, target_dir): ...@@ -62,7 +67,7 @@ def download_gsutil(version, target_dir):
# Do the download. # Do the download.
url = '%s%s' % (GSUTIL_URL, filename) url = '%s%s' % (GSUTIL_URL, filename)
u = urllib2.urlopen(url) u = urllib.urlopen(url)
with open(target_filename, 'wb') as f: with open(target_filename, 'wb') as f:
while True: while True:
buf = u.read(4096) buf = u.read(4096)
......
...@@ -15,7 +15,11 @@ import tempfile ...@@ -15,7 +15,11 @@ import tempfile
import threading import threading
import time import time
import traceback import traceback
import urllib2
try:
import urllib2 as urllib
except ImportError: # For Py3 compatibility
import urllib.request as urllib
import detect_host_arch import detect_host_arch
import gclient_utils import gclient_utils
...@@ -60,9 +64,9 @@ class _Config(object): ...@@ -60,9 +64,9 @@ class _Config(object):
# check if we can reach the page. An external developer would get access # check if we can reach the page. An external developer would get access
# denied. # denied.
try: try:
req = urllib2.urlopen(metrics_utils.APP_URL + '/should-upload') req = urllib.urlopen(metrics_utils.APP_URL + '/should-upload')
self._config['is-googler'] = req.getcode() == 200 self._config['is-googler'] = req.getcode() == 200
except (urllib2.URLError, urllib2.HTTPError): except (urllib.URLError, urllib.HTTPError):
self._config['is-googler'] = False self._config['is-googler'] = False
# Make sure the config variables we need are present, and initialize them to # Make sure the config variables we need are present, and initialize them to
...@@ -224,7 +228,7 @@ class MetricsCollector(object): ...@@ -224,7 +228,7 @@ class MetricsCollector(object):
self._upload_metrics_data() self._upload_metrics_data()
if exception: if exception:
raise exception[0], exception[1], exception[2] gclient_utils.reraise(exception[0], exception[1], exception[2])
return result return result
def collect_metrics(self, command_name): def collect_metrics(self, command_name):
......
...@@ -9,7 +9,11 @@ import re ...@@ -9,7 +9,11 @@ import re
import scm import scm
import subprocess2 import subprocess2
import sys import sys
import urlparse
try:
import urlparse
except ImportError: # For Py3 compatibility
import urllib.parse as urlparse
# Current version of metrics recording. # Current version of metrics recording.
...@@ -280,7 +284,7 @@ def print_boxed_text(out, min_width, lines): ...@@ -280,7 +284,7 @@ def print_boxed_text(out, min_width, lines):
width = max(min_width, max(len(line) for line in lines)) width = max(min_width, max(len(line) for line in lines))
out(SE + EW * (width + 2) + SW + '\n') out(SE + EW * (width + 2) + SW + '\n')
for line in lines: for line in lines:
out('%s %-*s %s\n' % (NS, width, line, NS)) out('%s %-*s %s\n' % (NS, width, line, NS))
out(NE + EW * (width + 2) + NW + '\n') out(NE + EW * (width + 2) + NW + '\n')
def print_notice(countdown): def print_notice(countdown):
......
...@@ -4,8 +4,8 @@ ...@@ -4,8 +4,8 @@
"""SCM-specific utility classes.""" """SCM-specific utility classes."""
import cStringIO
import glob import glob
import io
import logging import logging
import os import os
import platform import platform
...@@ -51,7 +51,7 @@ def GenFakeDiff(filename): ...@@ -51,7 +51,7 @@ def GenFakeDiff(filename):
filename = filename.replace(os.sep, '/') filename = filename.replace(os.sep, '/')
nb_lines = len(file_content) nb_lines = len(file_content)
# We need to use / since patch on unix will fail otherwise. # We need to use / since patch on unix will fail otherwise.
data = cStringIO.StringIO() data = io.StringIO()
data.write("Index: %s\n" % filename) data.write("Index: %s\n" % filename)
data.write('=' * 67 + '\n') data.write('=' * 67 + '\n')
# Note: Should we use /dev/null instead? # Note: Should we use /dev/null instead?
...@@ -369,9 +369,9 @@ class GIT(object): ...@@ -369,9 +369,9 @@ class GIT(object):
"""Asserts git's version is at least min_version.""" """Asserts git's version is at least min_version."""
if cls.current_version is None: if cls.current_version is None:
current_version = cls.Capture(['--version'], '.') current_version = cls.Capture(['--version'], '.')
matched = re.search(r'version ([0-9\.]+)', current_version) matched = re.search(r'version ([0-9\.]+)', current_version.decode())
cls.current_version = matched.group(1) cls.current_version = matched.group(1)
current_version_list = map(only_int, cls.current_version.split('.')) current_version_list = list(map(only_int, cls.current_version.split('.')))
for min_ver in map(int, min_version.split('.')): for min_ver in map(int, min_version.split('.')):
ver = current_version_list.pop(0) ver = current_version_list.pop(0)
if ver < min_ver: if ver < min_ver:
......
...@@ -3,6 +3,8 @@ ...@@ -3,6 +3,8 @@
# Use of this source code is governed by a BSD-style license that can be # Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file. # found in the LICENSE file.
from __future__ import print_function
import os import os
import sys import sys
from third_party import colorama from third_party import colorama
...@@ -90,5 +92,5 @@ def init(): ...@@ -90,5 +92,5 @@ def init():
if __name__ == '__main__': if __name__ == '__main__':
init() init()
print 'IS_TTY:', IS_TTY print('IS_TTY:', IS_TTY)
print 'OUT_TYPE:', OUT_TYPE print('OUT_TYPE:', OUT_TYPE)
...@@ -7,12 +7,17 @@ ...@@ -7,12 +7,17 @@
In theory you shouldn't need anything else in subprocess, or this module failed. In theory you shouldn't need anything else in subprocess, or this module failed.
""" """
import cStringIO
import codecs import codecs
import errno import errno
import io
import logging import logging
import os import os
import Queue
try:
import Queue
except ImportError: # For Py3 compatibility
import queue as Queue
import subprocess import subprocess
import sys import sys
import time import time
...@@ -20,7 +25,8 @@ import threading ...@@ -20,7 +25,8 @@ import threading
# Cache the string-escape codec to ensure subprocess can find it later. # Cache the string-escape codec to ensure subprocess can find it later.
# See crbug.com/912292#c2 for context. # See crbug.com/912292#c2 for context.
codecs.lookup('string-escape') if sys.version_info.major == 2:
codecs.lookup('string-escape')
# Constants forwarded from subprocess. # Constants forwarded from subprocess.
PIPE = subprocess.PIPE PIPE = subprocess.PIPE
...@@ -208,7 +214,8 @@ class Popen(subprocess.Popen): ...@@ -208,7 +214,8 @@ class Popen(subprocess.Popen):
# the list. # the list.
kwargs['shell'] = bool(sys.platform=='win32') kwargs['shell'] = bool(sys.platform=='win32')
if isinstance(args, basestring): if isinstance(args, str) or (sys.version_info.major == 2 and
isinstance(args, unicode)):
tmp_str = args tmp_str = args
elif isinstance(args, (list, tuple)): elif isinstance(args, (list, tuple)):
tmp_str = ' '.join(args) tmp_str = ' '.join(args)
...@@ -248,7 +255,7 @@ class Popen(subprocess.Popen): ...@@ -248,7 +255,7 @@ class Popen(subprocess.Popen):
try: try:
with self.popen_lock: with self.popen_lock:
super(Popen, self).__init__(args, **kwargs) super(Popen, self).__init__(args, **kwargs)
except OSError, e: except OSError as e:
if e.errno == errno.EAGAIN and sys.platform == 'cygwin': if e.errno == errno.EAGAIN and sys.platform == 'cygwin':
# Convert fork() emulation failure into a CygwinRebaseError(). # Convert fork() emulation failure into a CygwinRebaseError().
raise CygwinRebaseError( raise CygwinRebaseError(
...@@ -285,7 +292,7 @@ class Popen(subprocess.Popen): ...@@ -285,7 +292,7 @@ class Popen(subprocess.Popen):
def write_stdin(): def write_stdin():
try: try:
stdin_io = cStringIO.StringIO(input) stdin_io = io.BytesIO(input)
while True: while True:
data = stdin_io.read(1024) data = stdin_io.read(1024)
if data: if data:
...@@ -451,7 +458,8 @@ def communicate(args, timeout=None, nag_timer=None, nag_max=None, **kwargs): ...@@ -451,7 +458,8 @@ def communicate(args, timeout=None, nag_timer=None, nag_max=None, **kwargs):
""" """
stdin = kwargs.pop('stdin', None) stdin = kwargs.pop('stdin', None)
if stdin is not None: if stdin is not None:
if isinstance(stdin, basestring): if isinstance(stdin, str) or (sys.version_info.major == 2 and
isinstance(stdin, unicode)):
# When stdin is passed as an argument, use it as the actual input data and # When stdin is passed as an argument, use it as the actual input data and
# set the Popen() parameter accordingly. # set the Popen() parameter accordingly.
kwargs['stdin'] = PIPE kwargs['stdin'] = PIPE
......
...@@ -33,7 +33,7 @@ class MetricsCollectorTest(unittest.TestCase): ...@@ -33,7 +33,7 @@ class MetricsCollectorTest(unittest.TestCase):
self.collector = metrics.MetricsCollector() self.collector = metrics.MetricsCollector()
# Keep track of the URL requests, file reads/writes and subprocess spawned. # Keep track of the URL requests, file reads/writes and subprocess spawned.
self.urllib2 = mock.Mock() self.urllib = mock.Mock()
self.print_notice = mock.Mock() self.print_notice = mock.Mock()
self.print_version_change = mock.Mock() self.print_version_change = mock.Mock()
self.Popen = mock.Mock() self.Popen = mock.Mock()
...@@ -42,7 +42,7 @@ class MetricsCollectorTest(unittest.TestCase): ...@@ -42,7 +42,7 @@ class MetricsCollectorTest(unittest.TestCase):
# So that we don't have to update the tests everytime we change the version. # So that we don't have to update the tests everytime we change the version.
mock.patch('metrics.metrics_utils.CURRENT_VERSION', 0).start() mock.patch('metrics.metrics_utils.CURRENT_VERSION', 0).start()
mock.patch('metrics.urllib2', self.urllib2).start() mock.patch('metrics.urllib', self.urllib).start()
mock.patch('metrics.subprocess.Popen', self.Popen).start() mock.patch('metrics.subprocess.Popen', self.Popen).start()
mock.patch('metrics.gclient_utils.FileWrite', self.FileWrite).start() mock.patch('metrics.gclient_utils.FileWrite', self.FileWrite).start()
mock.patch('metrics.gclient_utils.FileRead', self.FileRead).start() mock.patch('metrics.gclient_utils.FileRead', self.FileRead).start()
...@@ -92,7 +92,7 @@ class MetricsCollectorTest(unittest.TestCase): ...@@ -92,7 +92,7 @@ class MetricsCollectorTest(unittest.TestCase):
def test_writes_config_if_not_exists(self): def test_writes_config_if_not_exists(self):
self.FileRead.side_effect = [IOError(2, "No such file or directory")] self.FileRead.side_effect = [IOError(2, "No such file or directory")]
mock_response = mock.Mock() mock_response = mock.Mock()
self.urllib2.urlopen.side_effect = [mock_response] self.urllib.urlopen.side_effect = [mock_response]
mock_response.getcode.side_effect = [200] mock_response.getcode.side_effect = [200]
self.assertTrue(self.collector.config.is_googler) self.assertTrue(self.collector.config.is_googler)
...@@ -106,7 +106,7 @@ class MetricsCollectorTest(unittest.TestCase): ...@@ -106,7 +106,7 @@ class MetricsCollectorTest(unittest.TestCase):
def test_writes_config_if_not_exists_non_googler(self): def test_writes_config_if_not_exists_non_googler(self):
self.FileRead.side_effect = [IOError(2, "No such file or directory")] self.FileRead.side_effect = [IOError(2, "No such file or directory")]
mock_response = mock.Mock() mock_response = mock.Mock()
self.urllib2.urlopen.side_effect = [mock_response] self.urllib.urlopen.side_effect = [mock_response]
mock_response.getcode.side_effect = [403] mock_response.getcode.side_effect = [403]
self.assertFalse(self.collector.config.is_googler) self.assertFalse(self.collector.config.is_googler)
...@@ -120,7 +120,7 @@ class MetricsCollectorTest(unittest.TestCase): ...@@ -120,7 +120,7 @@ class MetricsCollectorTest(unittest.TestCase):
def test_disables_metrics_if_cant_write_config(self): def test_disables_metrics_if_cant_write_config(self):
self.FileRead.side_effect = [IOError(2, 'No such file or directory')] self.FileRead.side_effect = [IOError(2, 'No such file or directory')]
mock_response = mock.Mock() mock_response = mock.Mock()
self.urllib2.urlopen.side_effect = [mock_response] self.urllib.urlopen.side_effect = [mock_response]
mock_response.getcode.side_effect = [200] mock_response.getcode.side_effect = [200]
self.FileWrite.side_effect = [IOError(13, 'Permission denied.')] self.FileWrite.side_effect = [IOError(13, 'Permission denied.')]
......
...@@ -47,7 +47,6 @@ class RootTestCase(BaseSCMTestCase): ...@@ -47,7 +47,6 @@ class RootTestCase(BaseSCMTestCase):
def testMembersChanged(self): def testMembersChanged(self):
self.mox.ReplayAll() self.mox.ReplayAll()
members = [ members = [
'cStringIO',
'determine_scm', 'determine_scm',
'ElementTree', 'ElementTree',
'gclient_utils', 'gclient_utils',
...@@ -55,6 +54,7 @@ class RootTestCase(BaseSCMTestCase): ...@@ -55,6 +54,7 @@ class RootTestCase(BaseSCMTestCase):
'GetCasedPath', 'GetCasedPath',
'GIT', 'GIT',
'glob', 'glob',
'io',
'logging', 'logging',
'only_int', 'only_int',
'os', 'os',
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment