Commit c5f0cbb8 authored by Aravind Vasudevan, committed by LUCI CQ

Use pylint 2.7 for depot_tools

This includes a few fixes for specific errors, and disables several new
warnings introduced in this version, in order to allow for an incremental migration.

Bug: 1262286
Change-Id: I4b8f8fc521386419a3121bbb07edc8ac83170a94
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/3413679
Reviewed-by: Josip Sokcevic <sokcevic@google.com>
Commit-Queue: Aravind Vasudevan <aravindvasudev@google.com>
parent d94f9a6d
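For reviewers who want to try the new configuration locally, the check that PRESUBMIT.py now runs is roughly equivalent to the sketch below (illustrative only, not part of this change; it assumes pylint 2.7 is installed in the current environment and uses gclient.py as an example target):

  # Rough local approximation of the updated CheckPylint presubmit step.
  import subprocess

  DISABLED = ['R0401', 'W0613', 'C0415', 'R1710', 'E1101', 'E1120', 'R1708',
              'W1510', 'R0205', 'R1725', 'W0707', 'W1113']
  subprocess.run(
      ['python', '-m', 'pylint', '--disable=%s' % ','.join(DISABLED),
       'gclient.py'],
      check=False)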
@@ -57,15 +57,27 @@ def CheckPylint(input_api, output_api):
   files_to_skip.extend([fnmatch.translate(l) for l in lines if
                         l and not l.startswith('#')])
   disabled_warnings = [
       'R0401',  # Cyclic import
       'W0613',  # Unused argument
+      'C0415',  # import-outside-toplevel
+      'R1710',  # inconsistent-return-statements
+      'E1101',  # no-member
+      'E1120',  # no-value-for-parameter
+      'R1708',  # stop-iteration-return
+      'W1510',  # subprocess-run-check
+      # Checks which should be re-enabled after Python 2 support is removed.
+      'R0205',  # useless-object-inheritance
+      'R1725',  # super-with-arguments
+      'W0707',  # raise-missing-from
+      'W1113',  # keyword-arg-before-vararg
   ]
   return input_api.RunTests(input_api.canned_checks.GetPylint(
       input_api,
       output_api,
       files_to_check=files_to_check,
       files_to_skip=files_to_skip,
-      disabled_warnings=disabled_warnings))
+      disabled_warnings=disabled_warnings,
+      version='2.7'), parallel=False)


def CheckRecipes(input_api, output_api):
...
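The warnings above are disabled repo-wide; per the commit message, the intent is to re-enable them incrementally as files are cleaned up. For comparison, a narrower, file-local suppression would look like this (hypothetical example, not part of this change):

  # Under pylint 2.7, R0205 (useless-object-inheritance) would otherwise fire
  # on the explicit `object` base class below.
  # pylint: disable=useless-object-inheritance
  class LegacyThing(object):
    pass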
@@ -51,7 +51,7 @@ for index, arg in enumerate(input_args[1:]):
  elif arg.startswith('-C'):
    # Support -Cout/Default
    output_dir = arg[2:]
-  elif arg == '-o' or arg == '--offline':
+  elif arg in ('-o', '--offline'):
    offline = True
  elif arg == '-h':
    print('autoninja: Use -o/--offline to temporary disable goma.',
@@ -59,7 +59,7 @@ for index, arg in enumerate(input_args[1:]):
  print(file=sys.stderr)

# Strip -o/--offline so ninja doesn't see them.
-input_args = [ arg for arg in input_args if arg != '-o' and arg != '--offline']
+input_args = [ arg for arg in input_args if arg not in ('-o', '--offline')]

use_goma = False
use_remoteexec = False
...
@@ -83,8 +83,8 @@ def is_exe(filename):
  """Given a full filepath, return true if the file is an executable."""
  if sys.platform.startswith('win'):
    return filename.endswith('.exe')
-  else:
-    return os.path.isfile(filename) and os.access(filename, os.X_OK)
+  return os.path.isfile(filename) and os.access(filename, os.X_OK)


def get_available_tools():
...
@@ -28,7 +28,9 @@ def main():
        sys.argv[0])
    sys.exit(1)

+  # pylint: disable=unbalanced-tuple-unpacking
  base, current, others, file_name_in_tree = sys.argv[1:5]
+  # pylint: enable=unbalanced-tuple-unpacking

  if file_name_in_tree == '%P':
    print(file=sys.stderr)
...
@@ -23,7 +23,7 @@ def path_to_source_root(path):
  # to break when we rename directories.
  fingerprints = ['chrome', 'net', 'v8', 'build', 'skia']
  while candidate and not all(
-      [os.path.isdir(os.path.join(candidate, fp)) for fp in fingerprints]):
+      os.path.isdir(os.path.join(candidate, fp)) for fp in fingerprints):
    new_candidate = os.path.dirname(candidate)
    if new_candidate == candidate:
      raise Exception("Couldn't find source-dir from %s" % path)
...
@@ -5263,8 +5263,8 @@ _RE_PATTERN_STRING = re.compile(r'\bstring\b')
_re_pattern_headers_maybe_templates = []
for _header, _templates in _HEADERS_MAYBE_TEMPLATES:
  for _template in _templates:
-    # Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
-    # type::max().
+    # Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max
+    # or type::max().
    _re_pattern_headers_maybe_templates.append(
        (re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
         _template,
@@ -5380,9 +5380,9 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
    io: The IO factory to use to read the header file. Provided for unittest
        injection.
  """
-  required = {}  # A map of header name to linenumber and the template entity.
+  # A map of header name to linenumber and the template entity.
  # Example of required: { '<functional>': (1219, 'less<>') }
+  required = {}
  for linenum in range(clean_lines.NumLines()):
    line = clean_lines.elided[linenum]
    if not line or line[0] == '#':
@@ -5865,9 +5865,9 @@ def ProcessConfigOverrides(filename):
      elif name == 'linelength':
        global _line_length
        try:
-            _line_length = int(val)
+          _line_length = int(val)
        except ValueError:
-            sys.stderr.write('Line length must be numeric.')
+          sys.stderr.write('Line length must be numeric.')
      else:
        sys.stderr.write(
            'Invalid configuration option (%s) in file %s\n' %
@@ -5881,7 +5881,7 @@ def ProcessConfigOverrides(filename):
  # Apply all the accumulated filters in reverse order (top-level directory
  # config options having the least priority).
  for filter in reversed(cfg_filters):
-     _AddFilters(filter)
+    _AddFilters(filter)

  return True
@@ -6053,15 +6053,15 @@ def ParseArguments(args):
    elif opt == '--linelength':
      global _line_length
      try:
-          _line_length = int(val)
+        _line_length = int(val)
      except ValueError:
-          PrintUsage('Line length must be digits.')
+        PrintUsage('Line length must be digits.')
    elif opt == '--extensions':
      global _valid_extensions
      try:
-          _valid_extensions = set(val.split(','))
+        _valid_extensions = set(val.split(','))
      except ValueError:
-          PrintUsage('Extensions must be comma separated list.')
+        PrintUsage('Extensions must be comma separated list.')

  if not filenames:
    PrintUsage('No files were specified.')
...
@@ -44,9 +44,10 @@ PLATFORM_MAPPING = {
    'aix7': 'aix',
}

-if sys.version_info.major == 2:
-  class FileNotFoundError(IOError):
-    pass
+# pylint: disable=redefined-builtin
+class FileNotFoundError(IOError):
+  pass


class InvalidFileError(IOError):
...
@@ -56,7 +56,6 @@ class Checkout(object):
  def exists(self):
    """Check does this checkout already exist on desired location"""
-    pass

  def init(self):
    pass
@@ -67,18 +66,18 @@ class Checkout(object):
      return ''
    if return_stdout:
      return subprocess.check_output(cmd, **kwargs).decode()
-    else:
    try:
      subprocess.check_call(cmd, **kwargs)
    except subprocess.CalledProcessError as e:
      # If the subprocess failed, it likely emitted its own distress message
      # already - don't scroll that message off the screen with a stack trace
      # from this program as well. Emit a terse message and bail out here;
      # otherwise a later step will try doing more work and may hide the
      # subprocess message.
      print('Subprocess failed with return code %d.' % e.returncode)
      sys.exit(e.returncode)
    return ''


class GclientCheckout(Checkout):
...
@@ -238,7 +238,7 @@ class Hook(object):
        not gclient_eval.EvaluateCondition(self._condition, self._variables)):
      return

-    cmd = [arg for arg in self._action]
+    cmd = list(self._action)
    if cmd[0] == 'python':
      cmd[0] = 'vpython'
@@ -373,8 +373,8 @@ class DependencySettings(object):
  def target_os(self):
    if self.local_target_os is not None:
      return tuple(set(self.local_target_os).union(self.parent.target_os))
-    else:
    return self.parent.target_os

  @property
  def target_cpu(self):
@@ -1420,7 +1420,7 @@ solutions = %(solution_list)s
    if 'all' in enforced_os:
      enforced_os = self.DEPS_OS_CHOICES.values()
    self._enforced_os = tuple(set(enforced_os))
-    self._enforced_cpu = detect_host_arch.HostArch(),
+    self._enforced_cpu = (detect_host_arch.HostArch(), )
    self._root_dir = root_dir
    self._cipd_root = None
    self.config_content = None
@@ -1830,7 +1830,7 @@ it or fix the checkout.
    if command == 'update':
      gn_args_dep = self.dependencies[0]
      if gn_args_dep._gn_args_from:
-        deps_map = dict([(dep.name, dep) for dep in gn_args_dep.dependencies])
+        deps_map = {dep.name: dep for dep in gn_args_dep.dependencies}
        gn_args_dep = deps_map.get(gn_args_dep._gn_args_from)
      if gn_args_dep and gn_args_dep.HasGNArgsFile():
        gn_args_dep.WriteGNArgsFile()
@@ -1874,11 +1874,12 @@ it or fix the checkout.
      entries = {}

      def GrabDeps(dep):
        """Recursively grab dependencies."""
-        for d in dep.dependencies:
-          d.PinToActualRevision()
-          if ShouldPrintRevision(d):
-            entries[d.name] = d.url
-          GrabDeps(d)
+        for rec_d in dep.dependencies:
+          rec_d.PinToActualRevision()
+          if ShouldPrintRevision(rec_d):
+            entries[rec_d.name] = rec_d.url
+          GrabDeps(rec_d)
      GrabDeps(d)
      json_output.append({
        'name': d.name,
@@ -2275,7 +2276,7 @@ class Flattener(object):
      if key not in self._vars:
        continue
      # Don't "override" existing vars if it's actually the same value.
-      elif self._vars[key][1] == value:
+      if self._vars[key][1] == value:
        continue
      # Anything else is overriding a default value from the DEPS.
      self._vars[key] = (hierarchy + ' [custom_var override]', value)
...
@@ -38,8 +38,8 @@ class ConstantString(object):
  def __eq__(self, other):
    if isinstance(other, ConstantString):
      return self.value == other.value
-    else:
    return self.value == other

  def __hash__(self):
    return self.value.__hash__()
@@ -304,13 +304,14 @@ def _gclient_eval(node_or_string, filename='<unknown>', vars_dict=None):
        raise ValueError(
            '%s takes exactly one argument (file %r, line %s)' % (
                node.func.id, filename, getattr(node, 'lineno', '<unknown>')))
      if node.func.id == 'Str':
        if isinstance(node.args[0], ast.Str):
          return ConstantString(node.args[0].s)
        raise ValueError('Passed a non-string to Str() (file %r, line%s)' % (
            filename, getattr(node, 'lineno', '<unknown>')))
-      else:
      arg = _convert(node.args[0])
      if not isinstance(arg, basestring):
        raise ValueError(
            'Var\'s argument must be a variable name (file %r, line %s)' % (
@@ -540,16 +541,20 @@ def EvaluateCondition(condition, variables, referenced_variables=None):
  def _convert(node, allow_tuple=False):
    if isinstance(node, ast.Str):
      return node.s
-    elif isinstance(node, ast.Tuple) and allow_tuple:
+
+    if isinstance(node, ast.Tuple) and allow_tuple:
      return tuple(map(_convert, node.elts))
-    elif isinstance(node, ast.Name):
+
+    if isinstance(node, ast.Name):
      if node.id in referenced_variables:
        raise ValueError(
            'invalid cyclic reference to %r (inside %r)' % (
                node.id, condition))
-      elif node.id in _allowed_names:
+
+      if node.id in _allowed_names:
        return _allowed_names[node.id]
-      elif node.id in variables:
+
+      if node.id in variables:
        value = variables[node.id]
        # Allow using "native" types, without wrapping everything in strings.
@@ -562,16 +567,18 @@ def EvaluateCondition(condition, variables, referenced_variables=None):
            variables[node.id],
            variables,
            referenced_variables.union([node.id]))
-      else:
      # Implicitly convert unrecognized names to strings.
      # If we want to change this, we'll need to explicitly distinguish
      # between arguments for GN to be passed verbatim, and ones to
      # be evaluated.
      return node.id
-    elif not sys.version_info[:2] < (3, 4) and isinstance(
+
+    if not sys.version_info[:2] < (3, 4) and isinstance(
        node, ast.NameConstant):  # Since Python 3.4
      return node.value
-    elif isinstance(node, ast.BoolOp) and isinstance(node.op, ast.Or):
+
+    if isinstance(node, ast.BoolOp) and isinstance(node.op, ast.Or):
      bool_values = []
      for value in node.values:
        bool_values.append(_convert(value))
@@ -580,7 +587,8 @@ def EvaluateCondition(condition, variables, referenced_variables=None):
              'invalid "or" operand %r (inside %r)' % (
                  bool_values[-1], condition))
      return any(bool_values)
-    elif isinstance(node, ast.BoolOp) and isinstance(node.op, ast.And):
+
+    if isinstance(node, ast.BoolOp) and isinstance(node.op, ast.And):
      bool_values = []
      for value in node.values:
        bool_values.append(_convert(value))
@@ -589,13 +597,15 @@ def EvaluateCondition(condition, variables, referenced_variables=None):
              'invalid "and" operand %r (inside %r)' % (
                  bool_values[-1], condition))
      return all(bool_values)
-    elif isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not):
+
+    if isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not):
      value = _convert(node.operand)
      if not isinstance(value, bool):
        raise ValueError(
            'invalid "not" operand %r (inside %r)' % (value, condition))
      return not value
-    elif isinstance(node, ast.Compare):
+
+    if isinstance(node, ast.Compare):
      if len(node.ops) != 1:
        raise ValueError(
            'invalid compare: exactly 1 operator required (inside %r)' % (
@@ -619,10 +629,10 @@ def EvaluateCondition(condition, variables, referenced_variables=None):
        raise ValueError(
            'unexpected operator: %s %s (inside %r)' % (
                node.ops[0], ast.dump(node), condition))
-    else:
    raise ValueError(
        'unexpected AST node: %s %s (inside %r)' % (
            node, ast.dump(node), condition))

  return _convert(main_node)
@@ -738,7 +748,8 @@ def SetVar(gclient_dict, var_name, value):
  def _GetVarName(node):
    if isinstance(node, ast.Call):
      return node.args[0].s
-    elif node.s.endswith('}'):
+
+    if node.s.endswith('}'):
      last_brace = node.s.rfind('{')
      return node.s[last_brace+1:-1]
    return None
@@ -880,12 +891,14 @@ def GetRevision(gclient_dict, dep_name):
  dep = gclient_dict['deps'][dep_name]
  if dep is None:
    return None
-  elif isinstance(dep, basestring):
+
+  if isinstance(dep, basestring):
    _, _, revision = dep.partition('@')
    return revision or None
-  elif isinstance(dep, collections_abc.Mapping) and 'url' in dep:
+
+  if isinstance(dep, collections_abc.Mapping) and 'url' in dep:
    _, _, revision = dep['url'].partition('@')
    return revision or None
-  else:
  raise ValueError(
      '%s is not a valid git dependency.' % dep_name)
@@ -165,10 +165,10 @@ class SCMWrapper(object):
    if actual_remote_url:
      return (gclient_utils.SplitUrlRevision(actual_remote_url)[0].rstrip('/')
              == gclient_utils.SplitUrlRevision(self.url)[0].rstrip('/'))
-    else:
    # This may occur if the self.checkout_path exists but does not contain a
    # valid git checkout.
    return False

  def _DeleteOrMove(self, force):
    """Delete the checkout directory or move it out of the way.
@@ -381,7 +381,8 @@ class GitWrapper(SCMWrapper):
    if not target_rev:
      raise gclient_utils.Error('A target revision for the patch must be given')
-    elif target_rev.startswith(('refs/heads/', 'refs/branch-heads')):
+
+    if target_rev.startswith(('refs/heads/', 'refs/branch-heads')):
      # If |target_rev| is in refs/heads/** or refs/branch-heads/**, try first
      # to find the corresponding remote ref for it, since |target_rev| might
      # point to a local ref which is not up to date with the corresponding
@@ -781,16 +782,18 @@ class GitWrapper(SCMWrapper):
                printed_path=printed_path, merge=False)
            printed_path = True
            break
-          elif re.match(r'quit|q', action, re.I):
+
+          if re.match(r'quit|q', action, re.I):
            raise gclient_utils.Error("Can't fast-forward, please merge or "
                                      "rebase manually.\n"
                                      "cd %s && git " % self.checkout_path
                                      + "rebase %s" % upstream_branch)
-          elif re.match(r'skip|s', action, re.I):
+
+          if re.match(r'skip|s', action, re.I):
            self.Print('Skipping %s' % self.relpath)
            return
-          else:
          self.Print('Input not recognized')
        elif re.match(b"error: Your local changes to '.*' would be "
                      b"overwritten by merge. Aborting.\nPlease, commit your "
                      b"changes or stash them before you can merge.\n",
@@ -1137,16 +1140,18 @@ class GitWrapper(SCMWrapper):
              # Should this be recursive?
              rebase_output = scm.GIT.Capture(rebase_cmd, cwd=self.checkout_path)
              break
-            elif re.match(r'quit|q', rebase_action, re.I):
+
+            if re.match(r'quit|q', rebase_action, re.I):
              raise gclient_utils.Error("Please merge or rebase manually\n"
                                        "cd %s && git " % self.checkout_path
                                        + "%s" % ' '.join(rebase_cmd))
-            elif re.match(r'show|s', rebase_action, re.I):
+
+            if re.match(r'show|s', rebase_action, re.I):
              self.Print('%s' % e.stderr.decode('utf-8').strip())
              continue
-            else:
            gclient_utils.Error("Input not recognized")
            continue
          elif re.search(br'^CONFLICT', e.stdout, re.M):
            raise gclient_utils.Error("Conflict while rebasing this branch.\n"
                                      "Fix the conflict and run gclient again.\n"
@@ -1559,15 +1564,12 @@ class CipdWrapper(SCMWrapper):
    CIPD packages should be reverted at the root by running
    `CipdRoot.run('revert')`.
    """
-    pass

  def diff(self, options, args, file_list):
    """CIPD has no notion of diffing."""
-    pass

  def pack(self, options, args, file_list):
    """CIPD has no notion of diffing."""
-    pass

  def revinfo(self, options, args, file_list):
    """Grab the instance ID."""
@@ -1597,4 +1599,3 @@ class CipdWrapper(SCMWrapper):
    CIPD packages should be updated at the root by running
    `CipdRoot.run('update')`.
    """
-    pass
@@ -301,8 +301,8 @@ def rmtree(path):
      exitcode = subprocess.call(['cmd.exe', '/c', 'rd', '/q', '/s', path])
      if exitcode == 0:
        return
-      else:
      print('rd exited with code %d' % exitcode, file=sys.stderr)
      time.sleep(3)
    raise Exception('Failed to remove path %s' % path)
@@ -437,11 +437,12 @@ class Annotated(Wrapper):
      lf_loc = obj[0].find(b'\n')
      if cr_loc == lf_loc == -1:
        break
-      elif cr_loc == -1 or (lf_loc >= 0 and lf_loc < cr_loc):
+
+      if cr_loc == -1 or (0 <= lf_loc < cr_loc):
        line, remaining = obj[0].split(b'\n', 1)
        if line:
          self._wrapped_write(b'%d>%s\n' % (index, line))
-      elif lf_loc == -1 or (cr_loc >= 0 and cr_loc < lf_loc):
+      elif lf_loc == -1 or (0 <= cr_loc < lf_loc):
        line, remaining = obj[0].split(b'\r', 1)
        if line:
          self._wrapped_write(b'%d>%s\r' % (index, line))
@@ -750,12 +751,16 @@ def GetMacWinAixOrLinux():
  """Returns 'mac', 'win', or 'linux', matching the current platform."""
  if sys.platform.startswith(('cygwin', 'win')):
    return 'win'
-  elif sys.platform.startswith('linux'):
+
+  if sys.platform.startswith('linux'):
    return 'linux'
-  elif sys.platform == 'darwin':
+
+  if sys.platform == 'darwin':
    return 'mac'
-  elif sys.platform.startswith('aix'):
+
+  if sys.platform.startswith('aix'):
    return 'aix'
+
  raise Error('Unknown platform: ' + sys.platform)
@@ -806,7 +811,6 @@ class WorkItem(object):
  def run(self, work_queue):
    """work_queue is passed as keyword argument so it should be
    the last parameters of the function when you override it."""
-    pass

  @property
  def name(self):
@@ -1211,8 +1215,8 @@ def DefaultDeltaBaseCacheLimit():
  """
  if platform.architecture()[0].startswith('64'):
    return '2g'
-  else:
  return '512m'


def DefaultIndexPackConfig(url=''):
@@ -1259,13 +1263,15 @@ def freeze(obj):
  """
  if isinstance(obj, collections_abc.Mapping):
    return FrozenDict((freeze(k), freeze(v)) for k, v in obj.items())
-  elif isinstance(obj, (list, tuple)):
+
+  if isinstance(obj, (list, tuple)):
    return tuple(freeze(i) for i in obj)
-  elif isinstance(obj, set):
+
+  if isinstance(obj, set):
    return frozenset(freeze(i) for i in obj)
-  else:
  hash(obj)
  return obj


class FrozenDict(collections_abc.Mapping):
...
@@ -254,8 +254,8 @@ class CookiesAuthenticator(Authenticator):
      if a[0]:
        secret = base64.b64encode(('%s:%s' % (a[0], a[2])).encode('utf-8'))
        return 'Basic %s' % secret.decode('utf-8')
-      else:
      return 'Bearer %s' % a[2]
    return None

  def get_auth_email(self, host):
@@ -696,7 +696,8 @@ def GetChangeReview(host, change, revision=None):
    jmsg = GetChangeRevisions(host, change)
    if not jmsg:
      return None
-    elif len(jmsg) > 1:
+
+    if len(jmsg) > 1:
      raise GerritError(200, 'Multiple changes found for ChangeId %s.' % change)
    revision = jmsg[0]['current_revision']
  path = 'changes/%s/revisions/%s/review'
@@ -981,7 +982,8 @@ def ResetReviewLabels(host, change, label, value='0', message=None,
    if not jmsg:
      raise GerritError(
          200, 'Could not get review information for change "%s"' % change)
-    elif jmsg[0]['current_revision'] != revision:
+
+    if jmsg[0]['current_revision'] != revision:
      raise GerritError(200, 'While resetting labels on change "%s", '
                        'a new patchset was uploaded.' % change)
@@ -1000,7 +1002,7 @@ def CreateChange(host, project, branch='main', subject='', params=()):
  """
  path = 'changes/'
  body = {'project': project, 'branch': branch, 'subject': subject}
-  body.update({k: v for k, v in params})
+  body.update(dict(params))
  for key in 'project', 'branch', 'subject':
    if not body[key]:
      raise GerritError(200, '%s is required' % key.title())
...
@@ -116,7 +116,7 @@ class Mirror(object):
  def __init__(self, url, refs=None, commits=None, print_func=None):
    self.url = url
-    self.fetch_specs = set([self.parse_fetch_spec(ref) for ref in (refs or [])])
+    self.fetch_specs = {self.parse_fetch_spec(ref) for ref in (refs or [])}
    self.fetch_commits = set(commits or [])
    self.basedir = self.UrlToCacheDir(url)
    self.mirror_path = os.path.join(self.GetCachePath(), self.basedir)
@@ -576,8 +576,7 @@ class Mirror(object):
    if not prev_dest_prefix:
      return
    for path in ls_out_set:
-      if (path == prev_dest_prefix + '/' or
-          path == prev_dest_prefix + '.ready'):
+      if path in (prev_dest_prefix + '/', prev_dest_prefix + '.ready'):
        continue
      if path.endswith('.ready'):
        gsutil.call('rm', path)
...
@@ -638,7 +638,7 @@ def _FindYapfConfigFile(fpath, yapf_config_cache, top_dir=None):
    yapf_file = os.path.join(fpath, YAPF_CONFIG_FILENAME)
    if os.path.isfile(yapf_file):
      ret = yapf_file
-    elif fpath == top_dir or parent_dir == fpath:
+    elif fpath in (top_dir, parent_dir):
      # If we're at the top level directory, or if we're at root
      # there is no provided style.
      ret = None
@@ -1720,18 +1720,18 @@ class Changelist(object):
      if not force:
        confirm_or_exit('If you know what you are doing', action='continue')
      return
-    else:
    missing = (
        ([] if gerrit_auth else [self._gerrit_host]) +
        ([] if git_auth else [git_host]))
    DieWithError('Credentials for the following hosts are required:\n'
                 ' %s\n'
                 'These are read from %s (or legacy %s)\n'
                 '%s' % (
                     '\n '.join(missing),
                     cookie_auth.get_gitcookies_path(),
                     cookie_auth.get_netrc_path(),
                     cookie_auth.get_new_password_message(git_host)))

  def EnsureCanUploadPatchset(self, force):
    if not self.GetIssue():
@@ -1820,9 +1820,9 @@ class Changelist(object):
      if m.get('author', {}).get('_account_id') == owner:
        # Most recent message was by owner.
        return 'waiting'
-      else:
      # Some reply from non-owner.
      return 'reply'

    # Somehow there are no messages even though there are reviewers.
    return 'unsent'
@@ -1845,9 +1845,9 @@ class Changelist(object):
    data = self._GetChangeDetail(['ALL_REVISIONS'])
    patchset = data['revisions'][data['current_revision']]['_number']
-    dry_run = set([int(m['_revision_number'])
-                   for m in data.get('messages', [])
-                   if m.get('tag', '').endswith('dry-run')])
+    dry_run = {int(m['_revision_number'])
+               for m in data.get('messages', [])
+               if m.get('tag', '').endswith('dry-run')}
    for revision_info in sorted(data.get('revisions', {}).values(),
                                key=lambda c: c['_number'], reverse=True):
@@ -2634,8 +2634,8 @@ class Changelist(object):
    if git_footers.get_footer_change_id(new_log_desc):
      print('git-cl: Added Change-Id to commit message.')
      return new_log_desc
-    else:
    DieWithError('ERROR: Gerrit commit-msg hook not installed.')

  def CannotTriggerTryJobReason(self):
    try:
@@ -3341,10 +3341,10 @@ def CMDbaseurl(parser, args):
    print('Current base-url:')
    return RunGit(['config', 'branch.%s.base-url' % branch],
                  error_ok=False).strip()
-  else:
  print('Setting base-url to %s' % args[0])
  return RunGit(['config', 'branch.%s.base-url' % branch, args[0]],
                error_ok=False).strip()


def color_for_status(status):
@@ -3605,13 +3605,15 @@ def CMDarchive(parser, args):
  if options.dry_run:
    print('\nNo changes were made (dry run).\n')
    return 0
-  elif any(branch == current_branch for branch, _ in proposal):
+
+  if any(branch == current_branch for branch, _ in proposal):
    print('You are currently on a branch \'%s\' which is associated with a '
          'closed codereview issue, so archive cannot proceed. Please '
          'checkout another branch and run this command again.' %
          current_branch)
    return 1
-  elif not options.force:
+
+  if not options.force:
    answer = gclient_utils.AskForData('\nProceed with deletion (Y/n)? ').lower()
    if answer not in ('y', ''):
      print('Aborted.')
@@ -4148,7 +4150,9 @@ def GetTargetRef(remote, remote_branch, target_branch):
    if not match:
      # This is a branch path but not one we recognize; use as-is.
      remote_branch = target_branch
+  # pylint: disable=consider-using-get
  elif remote_branch in REFS_THAT_ALIAS_TO_OTHER_REFS:
+    # pylint: enable=consider-using-get
    # Handle the refs that need to land in different refs.
    remote_branch = REFS_THAT_ALIAS_TO_OTHER_REFS[remote_branch]
@@ -4565,8 +4569,10 @@ def GetTreeStatus(url=None):
    status = str(urllib.request.urlopen(url).read().lower())
    if status.find('closed') != -1 or status == '0':
      return 'closed'
-    elif status.find('open') != -1 or status == '1':
+
+    if status.find('open') != -1 or status == '1':
      return 'open'
+
    return 'unknown'
  return 'unset'
@@ -5102,8 +5108,8 @@ def _RunRustFmt(opts, rust_diff_files, top_dir, upstream_commit):
  if opts.presubmit and rustfmt_exitcode != 0:
    return 2
-  else:
  return 0


def MatchingFileType(file_name, extensions):
...
@@ -46,6 +46,7 @@ from io import BytesIO

if sys.version_info.major == 2:
  # On Python 3, BrokenPipeError is raised instead.
+  # pylint:disable=redefined-builtin
  BrokenPipeError = IOError
@@ -874,7 +875,7 @@ def status():
    while c != b'':
      c = stream.read(1)
      if c in (None, b'', b'\0'):
-        if len(acc.getvalue()):
+        if len(acc.getvalue()) > 0:
          yield acc.getvalue()
          acc = BytesIO()
      else:
...
@@ -71,7 +71,8 @@ def split_footers(message):
  for line in reversed(message_lines):
    if line == '' or line.isspace():
      break
-    elif parse_footer(line):
+
+    if parse_footer(line):
      footer_lines.extend(maybe_footer_lines)
      maybe_footer_lines = []
      footer_lines.append(line)
@@ -182,8 +183,8 @@ def get_unique(footers, key):
  assert len(values) <= 1, 'Multiple %s footers' % key
  if values:
    return values[0]
-  else:
  return None


def get_position(footers):
...
@@ -31,6 +31,7 @@ from third_party import colorama

if sys.version_info.major == 2:
  # On Python 3, BrokenPipeError is raised instead.
+  # pylint:disable=redefined-builtin
  BrokenPipeError = IOError
@@ -68,7 +69,7 @@ def _print_help(outbuf):

def _color_branch(branch, all_branches, all_tags, current):
-  if branch == current or branch == 'HEAD -> ' + current:
+  if branch in (current, 'HEAD -> ' + current):
    color = CYAN
    current = None
  elif branch in all_branches:
...
@@ -41,7 +41,8 @@ def main(args):
  if not downstreams:
    print("No downstream branches")
    return 1
-  elif len(downstreams) == 1:
+
+  if len(downstreams) == 1:
    run('checkout', downstreams[0], stdout=sys.stdout, stderr=sys.stderr)
  else:
    high = len(downstreams) - 1
...
@@ -62,8 +62,8 @@ def pathlify(hash_prefix):
  """
  if sys.version_info.major == 3:
    return '/'.join('%02x' % b for b in hash_prefix)
-  else:
  return '/'.join('%02x' % ord(b) for b in hash_prefix)


@git.memoize_one(threadsafe=False)
...
@@ -66,8 +66,7 @@ def main(args):
    print(
        'gn.py: Could not find gn executable at: %s' % gn_path, file=sys.stderr)
    return 2
-  else:
-    return subprocess.call([gn_path] + args[1:])
+  return subprocess.call([gn_path] + args[1:])


if __name__ == '__main__':
...
@@ -54,7 +54,8 @@ def get_notice_footer():

def get_change_notice(version):
  if version == 0:
    return []  # No changes for version 0
-  elif version == 1:
+
+  if version == 1:
    return [
      'We want to collect the Git version.',
      'We want to collect information about the HTTP',
@@ -64,7 +65,8 @@ def get_change_notice(version):
      'We only collect known strings to make sure we',
      'don\'t record PII.',
    ]
-  elif version == 2:
+
+  if version == 2:
    return [
      'We will start collecting metrics from bots.',
      'There are no changes for developers.',
...
@@ -68,7 +68,7 @@ try:
  from dateutil.relativedelta import relativedelta
except ImportError:
  logging.error('python-dateutil package required')
-  exit(1)
+  sys.exit(1)


class DefaultFormatter(Formatter):
@@ -76,10 +76,11 @@ class DefaultFormatter(Formatter):
    super(DefaultFormatter, self).__init__()
    self.default = default

-  def get_value(self, key, args, kwds):
-    if isinstance(key, str) and key not in kwds:
+  def get_value(self, key, args, kwargs):
+    if isinstance(key, str) and key not in kwargs:
      return self.default
-    return Formatter.get_value(self, key, args, kwds)
+    return Formatter.get_value(self, key, args, kwargs)


gerrit_instances = [
  {
@@ -168,9 +169,10 @@ def get_week_of(date):
def get_yes_or_no(msg):
  while True:
    response = gclient_utils.AskForData(msg + ' yes/no [no] ')
-    if response == 'y' or response == 'yes':
+    if response in ('y', 'yes'):
      return True
-    elif not response or response == 'n' or response == 'no':
+
+    if not response or response in ('n', 'no'):
      return False
@@ -410,7 +412,7 @@ class MyActivity(object):
    return [
        issue for issue in issues
-        if issue['author'] == user_str or issue['owner'] == user_str]
+        if user_str in (issue['author'], issue['owner'])]

  def monorail_get_issues(self, project, issue_ids):
    return self.monorail_query_issues(project, {
@@ -531,7 +533,7 @@ class MyActivity(object):
    return True

  def filter_modified(self, modified):
-    return self.modified_after < modified and modified < self.modified_before
+    return self.modified_after < modified < self.modified_before

  def auth_for_changes(self):
    #TODO(cjhopman): Move authentication check for getting changes here.
@@ -686,12 +688,12 @@ class MyActivity(object):
      return output

    class PythonObjectEncoder(json.JSONEncoder):
-      def default(self, obj):  # pylint: disable=method-hidden
-        if isinstance(obj, datetime):
-          return obj.isoformat()
-        if isinstance(obj, set):
-          return list(obj)
-        return json.JSONEncoder.default(self, obj)
+      def default(self, o):  # pylint: disable=method-hidden
+        if isinstance(o, datetime):
+          return o.isoformat()
+        if isinstance(o, set):
+          return list(o)
+        return json.JSONEncoder.default(self, o)

    output = {
      'reviews': format_for_json_dump(self.reviews),
@@ -918,16 +920,16 @@ def main():
      config = json.load(f)
    for item, entries in config.items():
      if item == 'gerrit_instances':
        for repo, dic in entries.items():
          # Use property name as URL
          dic['url'] = repo
          gerrit_instances.append(dic)
      elif item == 'monorail_projects':
        monorail_projects.append(entries)
      else:
        logging.error('Invalid entry in config file.')
        return 1

  my_activity = MyActivity(options)
  my_activity.show_progress('Loading data')
...
@@ -95,31 +95,39 @@ class OwnersFinder(object):
      while True:
        inp = self.input_command(owner)
-        if inp == 'y' or inp == 'yes':
+        if inp in ('y', 'yes'):
          self.select_owner(owner)
          break
-        elif inp == 'n' or inp == 'no':
+
+        if inp in ('n', 'no'):
          self.deselect_owner(owner)
          break
-        elif inp == '' or inp == 'd' or inp == 'defer':
+
+        if inp in ('', 'd', 'defer'):
          self.owners_queue.append(self.owners_queue.pop(0))
          break
-        elif inp == 'f' or inp == 'files':
+
+        if inp in ('f', 'files'):
          self.list_files()
          break
-        elif inp == 'o' or inp == 'owners':
+
+        if inp in ('o', 'owners'):
          self.list_owners(self.owners_queue)
          break
-        elif inp == 'p' or inp == 'pick':
+
+        if inp in ('p', 'pick'):
          self.pick_owner(gclient_utils.AskForData('Pick an owner: '))
          break
-        elif inp.startswith('p ') or inp.startswith('pick '):
+
+        if inp.startswith('p ') or inp.startswith('pick '):
          self.pick_owner(inp.split(' ', 2)[1].strip())
          break
-        elif inp == 'r' or inp == 'restart':
+
+        if inp in ('r', 'restart'):
          self.reset()
          break
-        elif inp == 'q' or inp == 'quit':
+
+        if inp in ('q', 'quit'):
          # Exit with error
          return 1
@@ -268,11 +276,13 @@ class OwnersFinder(object):
      self.writeln('You cannot pick ' + self.bold_name(ow) + ' manually. ' +
                   'It\'s an invalid name or not related to the change list.')
      return False
-    elif ow in self.selected_owners:
+
+    if ow in self.selected_owners:
      self.writeln('You cannot pick ' + self.bold_name(ow) + ' manually. ' +
                   'It\'s already selected.')
      return False
-    elif ow in self.deselected_owners:
+
+    if ow in self.deselected_owners:
      self.writeln('You cannot pick ' + self.bold_name(ow) + ' manually.' +
                   'It\'s already unselected.')
      return False
...
@@ -70,9 +70,9 @@ def CheckChangeHasBugField(input_api, output_api):
          'Buganizer bugs should be prefixed with b:, not b/.')
    ]
    return []
-  else:
  return [output_api.PresubmitNotifyResult(
      'If this change has an associated bug, add Bug: [bug number].')]


def CheckChangeHasNoUnwantedTags(input_api, output_api):
  UNWANTED_TAGS = {
@@ -94,12 +94,13 @@ def CheckChangeHasNoUnwantedTags(input_api, output_api):
def CheckDoNotSubmitInDescription(input_api, output_api):
  """Checks that the user didn't add 'DO NOT ''SUBMIT' to the CL description.
  """
-  keyword = 'DO NOT ''SUBMIT'
+  # Keyword is concatenated to avoid presubmit check rejecting the CL.
+  keyword = 'DO NOT ' + 'SUBMIT'
  if keyword in input_api.change.DescriptionText():
    return [output_api.PresubmitError(
        keyword + ' is present in the changelist description.')]
-  else:
  return []


def CheckChangeHasDescription(input_api, output_api):
@@ -108,8 +109,8 @@ def CheckChangeHasDescription(input_api, output_api):
  if text.strip() == '':
    if input_api.is_committing:
      return [output_api.PresubmitError('Add a description to the CL.')]
-    else:
    return [output_api.PresubmitNotifyResult('Add a description to the CL.')]
  return []
@@ -188,7 +189,9 @@ def CheckDoNotSubmitInFiles(input_api, output_api):
  """Checks that the user didn't add 'DO NOT ''SUBMIT' to any files."""
  # We want to check every text file, not just source files.
  file_filter = lambda x : x
-  keyword = 'DO NOT ''SUBMIT'
+
+  # Keyword is concatenated to avoid presubmit check rejecting the CL.
+  keyword = 'DO NOT ' + 'SUBMIT'

  def DoNotSubmitRule(extension, line):
    try:
      return keyword not in line
@@ -315,7 +318,7 @@ def CheckGenderNeutral(input_api, output_api, source_file_filter=None):
      if gendered_re.search(line):
        errors.append('%s (%d): %s' % (f.LocalPath(), line_num, line))

-  if len(errors):
+  if errors:
    return [output_api.PresubmitPromptWarning('Found a gendered pronoun in:',
                                              long_text='\n'.join(errors))]
  return []
@@ -526,7 +529,7 @@ def CheckLongLines(input_api, output_api, maxlen, source_file_filter=None):
    """True iff the pylint directive starting at line[pos] is global."""
    # Any character before |pos| that is not whitespace or '#' indidcates
    # this is a local directive.
-    return not any([c not in " \t#" for c in line[:pos]])
+    return not any(c not in " \t#" for c in line[:pos])

  def check_python_long_lines(affected_files, error_formatter):
    errors = []
@@ -583,8 +586,8 @@ def CheckLongLines(input_api, output_api, maxlen, source_file_filter=None):
  if errors:
    msg = 'Found lines longer than %s characters (first 5 shown).' % maxlen
    return [output_api.PresubmitPromptWarning(msg, items=errors[:5])]
-  else:
  return []


def CheckLicense(input_api, output_api, license_re=None, project_name=None,
@@ -960,7 +963,7 @@ def GetPylint(input_api,
  extra_paths_list = extra_paths_list or []
  assert version in ('1.5', '2.6', '2.7'), \
-      'Unsupported pylint version: ' + version
+      'Unsupported pylint version: %s' % version
  python2 = (version == '1.5')

  if input_api.is_committing:
...@@ -1069,11 +1072,10 @@ def GetPylint(input_api, ...@@ -1069,11 +1072,10 @@ def GetPylint(input_api,
GetPylintCmd(files, ["--disable=cyclic-import"], True), GetPylintCmd(files, ["--disable=cyclic-import"], True),
GetPylintCmd(files, ["--disable=all", "--enable=cyclic-import"], False) GetPylintCmd(files, ["--disable=all", "--enable=cyclic-import"], False)
] ]
else:
return [ GetPylintCmd(files, [], True) ]
else: return [ GetPylintCmd(files, [], True) ]
return map(lambda x: GetPylintCmd([x], [], 1), files)
return map(lambda x: GetPylintCmd([x], [], 1), files)
def RunPylint(input_api, *args, **kwargs): def RunPylint(input_api, *args, **kwargs):
...@@ -1090,11 +1092,11 @@ def CheckDirMetadataFormat(input_api, output_api, dirmd_bin=None): ...@@ -1090,11 +1092,11 @@ def CheckDirMetadataFormat(input_api, output_api, dirmd_bin=None):
# complete. # complete.
file_filter = lambda f: ( file_filter = lambda f: (
input_api.basename(f.LocalPath()) in ('DIR_METADATA', 'OWNERS')) input_api.basename(f.LocalPath()) in ('DIR_METADATA', 'OWNERS'))
affected_files = set([ affected_files = {
f.AbsoluteLocalPath() f.AbsoluteLocalPath()
for f in input_api.change.AffectedFiles( for f in input_api.change.AffectedFiles(
include_deletes=False, file_filter=file_filter) include_deletes=False, file_filter=file_filter)
]) }
if not affected_files: if not affected_files:
return [] return []
...@@ -1145,11 +1147,11 @@ def CheckOwnersDirMetadataExclusive(input_api, output_api): ...@@ -1145,11 +1147,11 @@ def CheckOwnersDirMetadataExclusive(input_api, output_api):
input_api.re.MULTILINE) input_api.re.MULTILINE)
file_filter = ( file_filter = (
lambda f: input_api.basename(f.LocalPath()) in ('OWNERS', 'DIR_METADATA')) lambda f: input_api.basename(f.LocalPath()) in ('OWNERS', 'DIR_METADATA'))
affected_dirs = set([ affected_dirs = {
input_api.os_path.dirname(f.AbsoluteLocalPath()) input_api.os_path.dirname(f.AbsoluteLocalPath())
for f in input_api.change.AffectedFiles( for f in input_api.change.AffectedFiles(
include_deletes=False, file_filter=file_filter) include_deletes=False, file_filter=file_filter)
]) }
errors = [] errors = []
for path in affected_dirs: for path in affected_dirs:
...@@ -1173,11 +1175,11 @@ def CheckOwnersDirMetadataExclusive(input_api, output_api): ...@@ -1173,11 +1175,11 @@ def CheckOwnersDirMetadataExclusive(input_api, output_api):
def CheckOwnersFormat(input_api, output_api): def CheckOwnersFormat(input_api, output_api):
if input_api.gerrit and input_api.gerrit.IsCodeOwnersEnabledOnRepo(): if input_api.gerrit and input_api.gerrit.IsCodeOwnersEnabledOnRepo():
return [] return []
affected_files = set([ affected_files = {
f.LocalPath() f.LocalPath()
for f in input_api.change.AffectedFiles() for f in input_api.change.AffectedFiles()
if 'OWNERS' in f.LocalPath() and f.Action() != 'D' if 'OWNERS' in f.LocalPath() and f.Action() != 'D'
]) }
if not affected_files: if not affected_files:
return [] return []
try: try:
...@@ -1202,8 +1204,9 @@ def CheckOwners( ...@@ -1202,8 +1204,9 @@ def CheckOwners(
if input_api.gerrit and input_api.gerrit.IsCodeOwnersEnabledOnRepo(): if input_api.gerrit and input_api.gerrit.IsCodeOwnersEnabledOnRepo():
return [] return []
affected_files = set([f.LocalPath() for f in affected_files = {f.LocalPath() for f in
input_api.change.AffectedFiles(file_filter=source_file_filter)]) input_api.change.AffectedFiles(
file_filter=source_file_filter)}
owner_email, reviewers = GetCodereviewOwnerAndReviewers( owner_email, reviewers = GetCodereviewOwnerAndReviewers(
input_api, approval_needed=input_api.is_committing) input_api, approval_needed=input_api.is_committing)
...@@ -1735,7 +1738,7 @@ def CheckChangedLUCIConfigs(input_api, output_api): ...@@ -1735,7 +1738,7 @@ def CheckChangedLUCIConfigs(input_api, output_api):
sev = msg['severity'] sev = msg['severity']
if sev == 'WARNING': if sev == 'WARNING':
out_f = output_api.PresubmitPromptWarning out_f = output_api.PresubmitPromptWarning
elif sev == 'ERROR' or sev == 'CRITICAL': elif sev in ('ERROR', 'CRITICAL'):
out_f = output_api.PresubmitError out_f = output_api.PresubmitError
else: else:
out_f = output_api.PresubmitNotifyResult out_f = output_api.PresubmitNotifyResult
......
...@@ -124,7 +124,7 @@ class MockInputApi(object): ...@@ -124,7 +124,7 @@ class MockInputApi(object):
if file_.LocalPath() == filename: if file_.LocalPath() == filename:
return '\n'.join(file_.NewContents()) return '\n'.join(file_.NewContents())
# Otherwise, file is not in our mock API. # Otherwise, file is not in our mock API.
raise IOError, "No such file or directory: '%s'" % filename raise IOError("No such file or directory: '%s'" % filename)
class MockOutputApi(object): class MockOutputApi(object):
......
...@@ -330,9 +330,11 @@ class _PresubmitResult(object): ...@@ -330,9 +330,11 @@ class _PresubmitResult(object):
""" """
if isinstance(val, str): if isinstance(val, str):
return val return val
if six.PY2 and isinstance(val, unicode): if six.PY2 and isinstance(val, unicode):
return val.encode() return val.encode()
elif six.PY3 and isinstance(val, bytes):
if six.PY3 and isinstance(val, bytes):
return val.decode() return val.decode()
raise ValueError("Unknown string type %s" % type(val)) raise ValueError("Unknown string type %s" % type(val))
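The reshuffled branches above normalize any string-like value to the native str type. A minimal sketch of the Python 3 behaviour, assuming only str and bytes inputs:

def to_native_str(val):
  if isinstance(val, str):
    return val
  if isinstance(val, bytes):
    return val.decode()  # bytes are decoded to text
  raise ValueError('Unknown string type %s' % type(val))

assert to_native_str(b'warning text') == 'warning text'
assert to_native_str('warning text') == 'warning text'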
...@@ -379,7 +381,6 @@ class _PresubmitPromptWarning(_PresubmitResult): ...@@ -379,7 +381,6 @@ class _PresubmitPromptWarning(_PresubmitResult):
# Public access through OutputApi object. # Public access through OutputApi object.
class _PresubmitNotifyResult(_PresubmitResult): class _PresubmitNotifyResult(_PresubmitResult):
"""Just print something to the screen -- but it's not even a warning.""" """Just print something to the screen -- but it's not even a warning."""
pass
# Top level object so multiprocessing can pickle # Top level object so multiprocessing can pickle
...@@ -1282,7 +1283,7 @@ class Change(object): ...@@ -1282,7 +1283,7 @@ class Change(object):
def owners_file_filter(f): def owners_file_filter(f):
return 'OWNERS' in os.path.split(f.LocalPath())[1] return 'OWNERS' in os.path.split(f.LocalPath())[1]
files = self.AffectedFiles(file_filter=owners_file_filter) files = self.AffectedFiles(file_filter=owners_file_filter)
return dict([(f.LocalPath(), f.OldContents()) for f in files]) return {f.LocalPath(): f.OldContents() for f in files}
class GitChange(Change): class GitChange(Change):
...@@ -1313,7 +1314,7 @@ def ListRelevantPresubmitFiles(files, root): ...@@ -1313,7 +1314,7 @@ def ListRelevantPresubmitFiles(files, root):
files = [normpath(os.path.join(root, f)) for f in files] files = [normpath(os.path.join(root, f)) for f in files]
# List all the individual directories containing files. # List all the individual directories containing files.
directories = set([os.path.dirname(f) for f in files]) directories = {os.path.dirname(f) for f in files}
# Ignore root if inherit-review-settings-ok is present. # Ignore root if inherit-review-settings-ok is present.
if os.path.isfile(os.path.join(root, 'inherit-review-settings-ok')): if os.path.isfile(os.path.join(root, 'inherit-review-settings-ok')):
...@@ -1746,7 +1747,7 @@ def DoPresubmitChecks(change, ...@@ -1746,7 +1747,7 @@ def DoPresubmitChecks(change,
global _ASKED_FOR_FEEDBACK global _ASKED_FOR_FEEDBACK
# Ask for feedback one time out of 5. # Ask for feedback one time out of 5.
if (len(results) and random.randint(0, 4) == 0 and not _ASKED_FOR_FEEDBACK): if (results and random.randint(0, 4) == 0 and not _ASKED_FOR_FEEDBACK):
sys.stdout.write( sys.stdout.write(
'Was the presubmit check useful? If not, run "git cl presubmit -v"\n' 'Was the presubmit check useful? If not, run "git cl presubmit -v"\n'
'to figure out which PRESUBMIT.py was run, then run git blame\n' 'to figure out which PRESUBMIT.py was run, then run git blame\n'
......
...@@ -71,23 +71,23 @@ def determine_scm(root): ...@@ -71,23 +71,23 @@ def determine_scm(root):
""" """
if os.path.isdir(os.path.join(root, '.git')): if os.path.isdir(os.path.join(root, '.git')):
return 'git' return 'git'
else:
try: try:
subprocess2.check_call( subprocess2.check_call(
['git', 'rev-parse', '--show-cdup'], ['git', 'rev-parse', '--show-cdup'],
stdout=subprocess2.DEVNULL, stdout=subprocess2.DEVNULL,
stderr=subprocess2.DEVNULL, stderr=subprocess2.DEVNULL,
cwd=root) cwd=root)
return 'git' return 'git'
except (OSError, subprocess2.CalledProcessError): except (OSError, subprocess2.CalledProcessError):
return None return None
def only_int(val): def only_int(val):
if val.isdigit(): if val.isdigit():
return int(val) return int(val)
else:
return 0 return 0
class GIT(object): class GIT(object):
...@@ -261,7 +261,8 @@ class GIT(object): ...@@ -261,7 +261,8 @@ class GIT(object):
if 'origin/main' in remote_branches: if 'origin/main' in remote_branches:
# Fall back on origin/main if it exists. # Fall back on origin/main if it exists.
return 'origin', 'refs/heads/main' return 'origin', 'refs/heads/main'
elif 'origin/master' in remote_branches:
if 'origin/master' in remote_branches:
# Fall back on origin/master if it exists. # Fall back on origin/master if it exists.
return 'origin', 'refs/heads/master' return 'origin', 'refs/heads/master'
......
...@@ -147,11 +147,11 @@ class CommandDispatcher(object): ...@@ -147,11 +147,11 @@ class CommandDispatcher(object):
reverse=True) reverse=True)
if (hamming_commands[0][0] - hamming_commands[1][0]) < 0.3: if (hamming_commands[0][0] - hamming_commands[1][0]) < 0.3:
# Too ambiguous. # Too ambiguous.
return return None
if hamming_commands[0][0] < 0.8: if hamming_commands[0][0] < 0.8:
# Not similar enough. Don't be a fool and run a random command. # Not similar enough. Don't be a fool and run a random command.
return return None
return commands[hamming_commands[0][1]] return commands[hamming_commands[0][1]]
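The bare return statements above become return None so that every exit path returns a value, matching the value-returning return at the end of the function; pylint's inconsistent-return-statements check flags functions that mix the two styles. A minimal sketch with hypothetical names:

def lookup(commands, name):
  if name not in commands:
    return None  # explicit None keeps all exits value-returning
  return commands[name]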
......
...@@ -49,7 +49,7 @@ def read_tree(tree_root): ...@@ -49,7 +49,7 @@ def read_tree(tree_root):
dirs.remove(d) dirs.remove(d)
for f in [join(root, f) for f in files if not f.startswith('.')]: for f in [join(root, f) for f in files if not f.startswith('.')]:
filepath = f[len(tree_root) + 1:].replace(os.sep, '/') filepath = f[len(tree_root) + 1:].replace(os.sep, '/')
assert len(filepath), f assert len(filepath) > 0, f
with io.open(join(root, f), encoding='utf-8') as f: with io.open(join(root, f), encoding='utf-8') as f:
tree[filepath] = f.read() tree[filepath] = f.read()
return tree return tree
......
...@@ -9,8 +9,6 @@ import os ...@@ -9,8 +9,6 @@ import os
import sys import sys
import unittest import unittest
#import test_env # pylint: disable=relative-import,unused-import
sys.path.insert(0, os.path.join( sys.path.insert(0, os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))), os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
'recipes', 'recipe_modules', 'bot_update', 'resources')) 'recipes', 'recipe_modules', 'bot_update', 'resources'))
......
...@@ -59,8 +59,8 @@ class GsutilMock(object): ...@@ -59,8 +59,8 @@ class GsutilMock(object):
if fn: if fn:
fn() fn()
return code return code
else:
return 0 return 0
def check_call(self, *args): def check_call(self, *args):
with self.lock: with self.lock:
...@@ -70,8 +70,8 @@ class GsutilMock(object): ...@@ -70,8 +70,8 @@ class GsutilMock(object):
if fn: if fn:
fn() fn()
return code, out, err return code, out, err
else:
return (0, '', '') return (0, '', '')
def check_call_with_retries(self, *args): def check_call_with_retries(self, *args):
return self.check_call(*args) return self.check_call(*args)
......
...@@ -165,10 +165,10 @@ from :3 ...@@ -165,10 +165,10 @@ from :3
return self.OptionsObject(*args, **kwargs) return self.OptionsObject(*args, **kwargs)
def checkstdout(self, expected): def checkstdout(self, expected):
# pylint: disable=no-member
value = sys.stdout.getvalue() value = sys.stdout.getvalue()
sys.stdout.close() sys.stdout.close()
# Check that the expected output appears. # Check that the expected output appears.
# pylint: disable=no-member
self.assertIn(expected, strip_timestamps(value)) self.assertIn(expected, strip_timestamps(value))
@staticmethod @staticmethod
...@@ -597,10 +597,10 @@ class ManagedGitWrapperTestCaseMock(unittest.TestCase): ...@@ -597,10 +597,10 @@ class ManagedGitWrapperTestCaseMock(unittest.TestCase):
return self.OptionsObject(*args, **kwargs) return self.OptionsObject(*args, **kwargs)
def checkstdout(self, expected): def checkstdout(self, expected):
# pylint: disable=no-member
value = sys.stdout.getvalue() value = sys.stdout.getvalue()
sys.stdout.close() sys.stdout.close()
# Check that the expected output appears. # Check that the expected output appears.
# pylint: disable=no-member
self.assertIn(expected, strip_timestamps(value)) self.assertIn(expected, strip_timestamps(value))
def setUp(self): def setUp(self):
...@@ -711,15 +711,15 @@ class ManagedGitWrapperTestCaseMock(unittest.TestCase): ...@@ -711,15 +711,15 @@ class ManagedGitWrapperTestCaseMock(unittest.TestCase):
class UnmanagedGitWrapperTestCase(BaseGitWrapperTestCase): class UnmanagedGitWrapperTestCase(BaseGitWrapperTestCase):
def checkInStdout(self, expected): def checkInStdout(self, expected):
# pylint: disable=no-member
value = sys.stdout.getvalue() value = sys.stdout.getvalue()
sys.stdout.close() sys.stdout.close()
# pylint: disable=no-member
self.assertIn(expected, value) self.assertIn(expected, value)
def checkNotInStdout(self, expected): def checkNotInStdout(self, expected):
# pylint: disable=no-member
value = sys.stdout.getvalue() value = sys.stdout.getvalue()
sys.stdout.close() sys.stdout.close()
# pylint: disable=no-member
self.assertNotIn(expected, value) self.assertNotIn(expected, value)
def getCurrentBranch(self): def getCurrentBranch(self):
......
...@@ -143,5 +143,3 @@ class GClientSmokeBase(fake_repos.FakeReposTestBase): ...@@ -143,5 +143,3 @@ class GClientSmokeBase(fake_repos.FakeReposTestBase):
self.checkString(results[i][0][2], path, (i, results[i][0][2], path)) self.checkString(results[i][0][2], path, (i, results[i][0][2], path))
self.assertEqual(len(results), len(items), (stdout, items, len(results))) self.assertEqual(len(results), len(items), (stdout, items, len(results)))
return results return results
...@@ -42,9 +42,11 @@ class TestGitFindReleases(unittest.TestCase): ...@@ -42,9 +42,11 @@ class TestGitFindReleases(unittest.TestCase):
assert len(args) > 1 assert len(args) > 1
if args[0] == 'name-rev' and args[1] == '--tags': if args[0] == 'name-rev' and args[1] == '--tags':
return 'undefined' return 'undefined'
elif args[0] == 'name-rev' and args[1] == '--refs':
if args[0] == 'name-rev' and args[1] == '--refs':
return '1.0.0' return '1.0.0'
elif args[0] == 'log':
if args[0] == 'log':
return '' return ''
assert False, "Unexpected arguments for git.run" assert False, "Unexpected arguments for git.run"
......
...@@ -53,11 +53,14 @@ class CMDFormatTestCase(unittest.TestCase): ...@@ -53,11 +53,14 @@ class CMDFormatTestCase(unittest.TestCase):
def RunMock(*args): def RunMock(*args):
if args[0] == 'remote': if args[0] == 'remote':
return 'origin\ngerrit' return 'origin\ngerrit'
elif args[0] == 'fetch':
if args[0] == 'fetch':
return return
elif args[0] == 'branch':
if args[0] == 'branch':
return return
elif args[0] == 'config':
if args[0] == 'config':
return return
raise Exception('Did not expect git.run command: %s' % args[0]) raise Exception('Did not expect git.run command: %s' % args[0])
......
...@@ -1261,7 +1261,7 @@ class InputApiUnittest(PresubmitTestsBase): ...@@ -1261,7 +1261,7 @@ class InputApiUnittest(PresubmitTestsBase):
# Ignores weird because of check_list, third_party because of skip_list, # Ignores weird because of check_list, third_party because of skip_list,
# binary isn't a text file and being deleted doesn't exist. The rest is # binary isn't a text file and being deleted doesn't exist. The rest is
# outside foo/. # outside foo/.
rhs_lines = [x for x in input_api.RightHandSideLines(None)] rhs_lines = list(input_api.RightHandSideLines(None))
self.assertEqual(len(rhs_lines), 14) self.assertEqual(len(rhs_lines), 14)
self.assertEqual(rhs_lines[0][0].LocalPath(), self.assertEqual(rhs_lines[0][0].LocalPath(),
presubmit.normpath(files[0][1])) presubmit.normpath(files[0][1]))
...@@ -2282,8 +2282,8 @@ the current line as well! ...@@ -2282,8 +2282,8 @@ the current line as well!
"print('foo')\n" "print('foo')\n"
) )
license_text = ( license_text = (
r".*? Copyright \(c\) 2037 Nobody." "\n" r".*? Copyright \(c\) 2037 Nobody.\n"
r".*? All Rights Reserved\." "\n" r".*? All Rights Reserved\.\n"
) )
self._LicenseCheck(text, license_text, True, None) self._LicenseCheck(text, license_text, True, None)
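Folding the separate "\n" literal into the raw string is behaviour-preserving: inside a raw string, \n is the two-character sequence backslash + n, which the regex engine itself interprets as a newline, so the match behaviour is unchanged. A quick check:

import re
old_style = r".*? Copyright \(c\) 2037 Nobody." "\n"  # literal newline appended
new_style = r".*? Copyright \(c\) 2037 Nobody.\n"     # regex escape, same match
text = "// Copyright (c) 2037 Nobody.\n"
assert re.match(old_style, text) and re.match(new_style, text)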
...@@ -2295,8 +2295,8 @@ the current line as well! ...@@ -2295,8 +2295,8 @@ the current line as well!
"print('foo')\n" "print('foo')\n"
) )
license_text = ( license_text = (
r".*? Copyright \(c\) 0007 Nobody." "\n" r".*? Copyright \(c\) 0007 Nobody.\n"
r".*? All Rights Reserved\." "\n" r".*? All Rights Reserved\.\n"
) )
self._LicenseCheck(text, license_text, True, self._LicenseCheck(text, license_text, True,
presubmit.OutputApi.PresubmitPromptWarning) presubmit.OutputApi.PresubmitPromptWarning)
...@@ -2309,8 +2309,8 @@ the current line as well! ...@@ -2309,8 +2309,8 @@ the current line as well!
"print('foo')\n" "print('foo')\n"
) )
license_text = ( license_text = (
r".*? Copyright \(c\) 0007 Nobody." "\n" r".*? Copyright \(c\) 0007 Nobody.\n"
r".*? All Rights Reserved\." "\n" r".*? All Rights Reserved\.\n"
) )
self._LicenseCheck(text, license_text, False, self._LicenseCheck(text, license_text, False,
presubmit.OutputApi.PresubmitPromptWarning) presubmit.OutputApi.PresubmitPromptWarning)
...@@ -2318,8 +2318,8 @@ the current line as well! ...@@ -2318,8 +2318,8 @@ the current line as well!
def testCheckLicenseEmptySuccess(self): def testCheckLicenseEmptySuccess(self):
text = '' text = ''
license_text = ( license_text = (
r".*? Copyright \(c\) 2037 Nobody." "\n" r".*? Copyright \(c\) 2037 Nobody.\n"
r".*? All Rights Reserved\." "\n" r".*? All Rights Reserved\.\n"
) )
self._LicenseCheck(text, license_text, True, None, accept_empty_files=True) self._LicenseCheck(text, license_text, True, None, accept_empty_files=True)
...@@ -2449,14 +2449,14 @@ the current line as well! ...@@ -2449,14 +2449,14 @@ the current line as well!
subprocess.Popen.return_value = process subprocess.Popen.return_value = process
presubmit.sigint_handler.wait.return_value = (b'', None) presubmit.sigint_handler.wait.return_value = (b'', None)
pylint = os.path.join(_ROOT, 'pylint-1.5') pylint = os.path.join(_ROOT, 'pylint-2.7')
pylintrc = os.path.join(_ROOT, 'pylintrc') pylintrc = os.path.join(_ROOT, 'pylintrc')
env = {str('PYTHONPATH'): str('')} env = {str('PYTHONPATH'): str('')}
if sys.platform == 'win32': if sys.platform == 'win32':
pylint += '.bat' pylint += '.bat'
results = presubmit_canned_checks.RunPylint( results = presubmit_canned_checks.RunPylint(
input_api, presubmit.OutputApi) input_api, presubmit.OutputApi, version='2.7')
self.assertEqual([], results) self.assertEqual([], results)
self.assertEqual(subprocess.Popen.mock_calls, [ self.assertEqual(subprocess.Popen.mock_calls, [
......
...@@ -141,6 +141,7 @@ class SmokeTests(unittest.TestCase): ...@@ -141,6 +141,7 @@ class SmokeTests(unittest.TestCase):
def test_check_output_no_stdout(self): def test_check_output_no_stdout(self):
for subp in (subprocess, subprocess2): for subp in (subprocess, subprocess2):
with self.assertRaises(ValueError): with self.assertRaises(ValueError):
# pylint: disable=unexpected-keyword-arg
subp.check_output(TEST_COMMAND, stdout=subp.PIPE) subp.check_output(TEST_COMMAND, stdout=subp.PIPE)
def test_print_exception(self): def test_print_exception(self):
......
...@@ -135,10 +135,10 @@ def get_targets(args, parser, use_null_terminator): ...@@ -135,10 +135,10 @@ def get_targets(args, parser, use_null_terminator):
# Take stdin as a newline or null separated list of files. # Take stdin as a newline or null separated list of files.
if use_null_terminator: if use_null_terminator:
return sys.stdin.read().split('\0') return sys.stdin.read().split('\0')
else:
return sys.stdin.read().splitlines() return sys.stdin.read().splitlines()
else:
return args return args
def upload_to_google_storage( def upload_to_google_storage(
......