Commit c5f0cbb8 authored by Aravind Vasudevan, committed by LUCI CQ

Use pylint 2.7 for depot_tools

This includes a few fixes for specific errors and disables several of the new
warnings introduced in this version, to allow for an incremental migration.

Bug: 1262286
Change-Id: I4b8f8fc521386419a3121bbb07edc8ac83170a94
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/3413679
Reviewed-by: Josip Sokcevic <sokcevic@google.com>
Commit-Queue: Aravind Vasudevan <aravindvasudev@google.com>
parent d94f9a6d
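
The migration below relies on two suppression mechanisms: the presubmit-wide disabled_warnings list passed to the pylint check (first hunk below), and targeted # pylint: disable=... comments at individual call sites. A minimal sketch of the comment form, using hypothetical code; it assumes pylint's usual scoping, where a standalone disable comment applies from that line to the end of the enclosing block unless explicitly re-enabled:

    # pylint: disable=unbalanced-tuple-unpacking
    base, current, others, file_name = argv[1:5]  # hypothetical unpacking
    # pylint: enable=unbalanced-tuple-unpacking
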
......@@ -57,15 +57,27 @@ def CheckPylint(input_api, output_api):
files_to_skip.extend([fnmatch.translate(l) for l in lines if
l and not l.startswith('#')])
disabled_warnings = [
'R0401', # Cyclic import
'W0613', # Unused argument
'R0401', # Cyclic import
'W0613', # Unused argument
'C0415', # import-outside-toplevel
'R1710', # inconsistent-return-statements
'E1101', # no-member
'E1120', # no-value-for-parameter
'R1708', # stop-iteration-return
'W1510', # subprocess-run-check
# Checks which should be re-enabled after Python 2 support is removed.
'R0205', # useless-object-inheritance
'R1725', # super-with-arguments
'W0707', # raise-missing-from
'W1113', # keyword-arg-before-vararg
]
return input_api.RunTests(input_api.canned_checks.GetPylint(
input_api,
output_api,
files_to_check=files_to_check,
files_to_skip=files_to_skip,
disabled_warnings=disabled_warnings))
disabled_warnings=disabled_warnings,
version='2.7'), parallel=False)
def CheckRecipes(input_api, output_api):
......
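
For illustration, one of the messages disabled above, R1710 (inconsistent-return-statements), fires when only some paths through a function return a value. A minimal sketch with hypothetical names, showing the shape pylint 2.7 reports and the usual fix once the check is re-enabled:

    # Flagged: the loop can fall off the end without an explicit return.
    def find_owner(name, owners):
        for owner in owners:
            if owner.name == name:
                return owner

    # Clean: every path returns explicitly.
    def find_owner(name, owners):
        for owner in owners:
            if owner.name == name:
                return owner
        return None
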
......@@ -51,7 +51,7 @@ for index, arg in enumerate(input_args[1:]):
elif arg.startswith('-C'):
# Support -Cout/Default
output_dir = arg[2:]
elif arg == '-o' or arg == '--offline':
elif arg in ('-o', '--offline'):
offline = True
elif arg == '-h':
print('autoninja: Use -o/--offline to temporary disable goma.',
......@@ -59,7 +59,7 @@ for index, arg in enumerate(input_args[1:]):
print(file=sys.stderr)
# Strip -o/--offline so ninja doesn't see them.
input_args = [ arg for arg in input_args if arg != '-o' and arg != '--offline']
input_args = [ arg for arg in input_args if arg not in ('-o', '--offline')]
use_goma = False
use_remoteexec = False
......
......@@ -83,8 +83,8 @@ def is_exe(filename):
"""Given a full filepath, return true if the file is an executable."""
if sys.platform.startswith('win'):
return filename.endswith('.exe')
else:
return os.path.isfile(filename) and os.access(filename, os.X_OK)
return os.path.isfile(filename) and os.access(filename, os.X_OK)
def get_available_tools():
......
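
The is_exe change above, like many hunks in this commit, drops an else branch that follows a return. Pylint reports that pattern as R1705 (no-else-return), with R1720 (no-else-raise) as the raise counterpart, which is likely what these cleanups address. A small before/after sketch with a hypothetical helper:

    # Before: pylint suggests removing the else.
    def default_cache_limit():
        if is_64bit_host():
            return '2g'
        else:
            return '512m'

    # After: the else is redundant once the first branch returns.
    def default_cache_limit():
        if is_64bit_host():
            return '2g'
        return '512m'
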
......@@ -28,7 +28,9 @@ def main():
sys.argv[0])
sys.exit(1)
# pylint: disable=unbalanced-tuple-unpacking
base, current, others, file_name_in_tree = sys.argv[1:5]
# pylint: enable=unbalanced-tuple-unpacking
if file_name_in_tree == '%P':
print(file=sys.stderr)
......
......@@ -23,7 +23,7 @@ def path_to_source_root(path):
# to break when we rename directories.
fingerprints = ['chrome', 'net', 'v8', 'build', 'skia']
while candidate and not all(
[os.path.isdir(os.path.join(candidate, fp)) for fp in fingerprints]):
os.path.isdir(os.path.join(candidate, fp)) for fp in fingerprints):
new_candidate = os.path.dirname(candidate)
if new_candidate == candidate:
raise Exception("Couldn't find source-dir from %s" % path)
......
......@@ -5263,8 +5263,8 @@ _RE_PATTERN_STRING = re.compile(r'\bstring\b')
_re_pattern_headers_maybe_templates = []
for _header, _templates in _HEADERS_MAYBE_TEMPLATES:
for _template in _templates:
# Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
# type::max().
# Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max
# or type::max().
_re_pattern_headers_maybe_templates.append(
(re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
_template,
......@@ -5380,9 +5380,9 @@ def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
io: The IO factory to use to read the header file. Provided for unittest
injection.
"""
required = {} # A map of header name to linenumber and the template entity.
# Example of required: { '<functional>': (1219, 'less<>') }
# A map of header name to linenumber and the template entity.
# Example of required: { '<functional>': (1219, 'less<>') }
required = {}
for linenum in range(clean_lines.NumLines()):
line = clean_lines.elided[linenum]
if not line or line[0] == '#':
......@@ -5865,9 +5865,9 @@ def ProcessConfigOverrides(filename):
elif name == 'linelength':
global _line_length
try:
_line_length = int(val)
_line_length = int(val)
except ValueError:
sys.stderr.write('Line length must be numeric.')
sys.stderr.write('Line length must be numeric.')
else:
sys.stderr.write(
'Invalid configuration option (%s) in file %s\n' %
......@@ -5881,7 +5881,7 @@ def ProcessConfigOverrides(filename):
# Apply all the accumulated filters in reverse order (top-level directory
# config options having the least priority).
for filter in reversed(cfg_filters):
_AddFilters(filter)
_AddFilters(filter)
return True
......@@ -6053,15 +6053,15 @@ def ParseArguments(args):
elif opt == '--linelength':
global _line_length
try:
_line_length = int(val)
_line_length = int(val)
except ValueError:
PrintUsage('Line length must be digits.')
PrintUsage('Line length must be digits.')
elif opt == '--extensions':
global _valid_extensions
try:
_valid_extensions = set(val.split(','))
_valid_extensions = set(val.split(','))
except ValueError:
PrintUsage('Extensions must be comma separated list.')
PrintUsage('Extensions must be comma separated list.')
if not filenames:
PrintUsage('No files were specified.')
......
......@@ -44,9 +44,10 @@ PLATFORM_MAPPING = {
'aix7': 'aix',
}
class FileNotFoundError(IOError):
pass
if sys.version_info.major == 2:
# pylint: disable=redefined-builtin
class FileNotFoundError(IOError):
pass
class InvalidFileError(IOError):
......
......@@ -56,7 +56,6 @@ class Checkout(object):
def exists(self):
"""Check does this checkout already exist on desired location"""
pass
def init(self):
pass
......@@ -67,18 +66,18 @@ class Checkout(object):
return ''
if return_stdout:
return subprocess.check_output(cmd, **kwargs).decode()
else:
try:
subprocess.check_call(cmd, **kwargs)
except subprocess.CalledProcessError as e:
# If the subprocess failed, it likely emitted its own distress message
# already - don't scroll that message off the screen with a stack trace
# from this program as well. Emit a terse message and bail out here;
# otherwise a later step will try doing more work and may hide the
# subprocess message.
print('Subprocess failed with return code %d.' % e.returncode)
sys.exit(e.returncode)
return ''
try:
subprocess.check_call(cmd, **kwargs)
except subprocess.CalledProcessError as e:
# If the subprocess failed, it likely emitted its own distress message
# already - don't scroll that message off the screen with a stack trace
# from this program as well. Emit a terse message and bail out here;
# otherwise a later step will try doing more work and may hide the
# subprocess message.
print('Subprocess failed with return code %d.' % e.returncode)
sys.exit(e.returncode)
return ''
class GclientCheckout(Checkout):
......
......@@ -238,7 +238,7 @@ class Hook(object):
not gclient_eval.EvaluateCondition(self._condition, self._variables)):
return
cmd = [arg for arg in self._action]
cmd = list(self._action)
if cmd[0] == 'python':
cmd[0] = 'vpython'
......@@ -373,8 +373,8 @@ class DependencySettings(object):
def target_os(self):
if self.local_target_os is not None:
return tuple(set(self.local_target_os).union(self.parent.target_os))
else:
return self.parent.target_os
return self.parent.target_os
@property
def target_cpu(self):
......@@ -1420,7 +1420,7 @@ solutions = %(solution_list)s
if 'all' in enforced_os:
enforced_os = self.DEPS_OS_CHOICES.values()
self._enforced_os = tuple(set(enforced_os))
self._enforced_cpu = detect_host_arch.HostArch(),
self._enforced_cpu = (detect_host_arch.HostArch(), )
self._root_dir = root_dir
self._cipd_root = None
self.config_content = None
......@@ -1830,7 +1830,7 @@ it or fix the checkout.
if command == 'update':
gn_args_dep = self.dependencies[0]
if gn_args_dep._gn_args_from:
deps_map = dict([(dep.name, dep) for dep in gn_args_dep.dependencies])
deps_map = {dep.name: dep for dep in gn_args_dep.dependencies}
gn_args_dep = deps_map.get(gn_args_dep._gn_args_from)
if gn_args_dep and gn_args_dep.HasGNArgsFile():
gn_args_dep.WriteGNArgsFile()
......@@ -1874,11 +1874,12 @@ it or fix the checkout.
entries = {}
def GrabDeps(dep):
"""Recursively grab dependencies."""
for d in dep.dependencies:
d.PinToActualRevision()
if ShouldPrintRevision(d):
entries[d.name] = d.url
GrabDeps(d)
for rec_d in dep.dependencies:
rec_d.PinToActualRevision()
if ShouldPrintRevision(rec_d):
entries[rec_d.name] = rec_d.url
GrabDeps(rec_d)
GrabDeps(d)
json_output.append({
'name': d.name,
......@@ -2275,7 +2276,7 @@ class Flattener(object):
if key not in self._vars:
continue
# Don't "override" existing vars if it's actually the same value.
elif self._vars[key][1] == value:
if self._vars[key][1] == value:
continue
# Anything else is overriding a default value from the DEPS.
self._vars[key] = (hierarchy + ' [custom_var override]', value)
......
......@@ -38,8 +38,8 @@ class ConstantString(object):
def __eq__(self, other):
if isinstance(other, ConstantString):
return self.value == other.value
else:
return self.value == other
return self.value == other
def __hash__(self):
return self.value.__hash__()
......@@ -304,13 +304,14 @@ def _gclient_eval(node_or_string, filename='<unknown>', vars_dict=None):
raise ValueError(
'%s takes exactly one argument (file %r, line %s)' % (
node.func.id, filename, getattr(node, 'lineno', '<unknown>')))
if node.func.id == 'Str':
if isinstance(node.args[0], ast.Str):
return ConstantString(node.args[0].s)
raise ValueError('Passed a non-string to Str() (file %r, line%s)' % (
filename, getattr(node, 'lineno', '<unknown>')))
else:
arg = _convert(node.args[0])
arg = _convert(node.args[0])
if not isinstance(arg, basestring):
raise ValueError(
'Var\'s argument must be a variable name (file %r, line %s)' % (
......@@ -540,16 +541,20 @@ def EvaluateCondition(condition, variables, referenced_variables=None):
def _convert(node, allow_tuple=False):
if isinstance(node, ast.Str):
return node.s
elif isinstance(node, ast.Tuple) and allow_tuple:
if isinstance(node, ast.Tuple) and allow_tuple:
return tuple(map(_convert, node.elts))
elif isinstance(node, ast.Name):
if isinstance(node, ast.Name):
if node.id in referenced_variables:
raise ValueError(
'invalid cyclic reference to %r (inside %r)' % (
node.id, condition))
elif node.id in _allowed_names:
if node.id in _allowed_names:
return _allowed_names[node.id]
elif node.id in variables:
if node.id in variables:
value = variables[node.id]
# Allow using "native" types, without wrapping everything in strings.
......@@ -562,16 +567,18 @@ def EvaluateCondition(condition, variables, referenced_variables=None):
variables[node.id],
variables,
referenced_variables.union([node.id]))
else:
# Implicitly convert unrecognized names to strings.
# If we want to change this, we'll need to explicitly distinguish
# between arguments for GN to be passed verbatim, and ones to
# be evaluated.
return node.id
elif not sys.version_info[:2] < (3, 4) and isinstance(
# Implicitly convert unrecognized names to strings.
# If we want to change this, we'll need to explicitly distinguish
# between arguments for GN to be passed verbatim, and ones to
# be evaluated.
return node.id
if not sys.version_info[:2] < (3, 4) and isinstance(
node, ast.NameConstant): # Since Python 3.4
return node.value
elif isinstance(node, ast.BoolOp) and isinstance(node.op, ast.Or):
if isinstance(node, ast.BoolOp) and isinstance(node.op, ast.Or):
bool_values = []
for value in node.values:
bool_values.append(_convert(value))
......@@ -580,7 +587,8 @@ def EvaluateCondition(condition, variables, referenced_variables=None):
'invalid "or" operand %r (inside %r)' % (
bool_values[-1], condition))
return any(bool_values)
elif isinstance(node, ast.BoolOp) and isinstance(node.op, ast.And):
if isinstance(node, ast.BoolOp) and isinstance(node.op, ast.And):
bool_values = []
for value in node.values:
bool_values.append(_convert(value))
......@@ -589,13 +597,15 @@ def EvaluateCondition(condition, variables, referenced_variables=None):
'invalid "and" operand %r (inside %r)' % (
bool_values[-1], condition))
return all(bool_values)
elif isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not):
if isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not):
value = _convert(node.operand)
if not isinstance(value, bool):
raise ValueError(
'invalid "not" operand %r (inside %r)' % (value, condition))
return not value
elif isinstance(node, ast.Compare):
if isinstance(node, ast.Compare):
if len(node.ops) != 1:
raise ValueError(
'invalid compare: exactly 1 operator required (inside %r)' % (
......@@ -619,10 +629,10 @@ def EvaluateCondition(condition, variables, referenced_variables=None):
raise ValueError(
'unexpected operator: %s %s (inside %r)' % (
node.ops[0], ast.dump(node), condition))
else:
raise ValueError(
'unexpected AST node: %s %s (inside %r)' % (
node, ast.dump(node), condition))
raise ValueError(
'unexpected AST node: %s %s (inside %r)' % (
node, ast.dump(node), condition))
return _convert(main_node)
......@@ -738,7 +748,8 @@ def SetVar(gclient_dict, var_name, value):
def _GetVarName(node):
if isinstance(node, ast.Call):
return node.args[0].s
elif node.s.endswith('}'):
if node.s.endswith('}'):
last_brace = node.s.rfind('{')
return node.s[last_brace+1:-1]
return None
......@@ -880,12 +891,14 @@ def GetRevision(gclient_dict, dep_name):
dep = gclient_dict['deps'][dep_name]
if dep is None:
return None
elif isinstance(dep, basestring):
if isinstance(dep, basestring):
_, _, revision = dep.partition('@')
return revision or None
elif isinstance(dep, collections_abc.Mapping) and 'url' in dep:
if isinstance(dep, collections_abc.Mapping) and 'url' in dep:
_, _, revision = dep['url'].partition('@')
return revision or None
else:
raise ValueError(
'%s is not a valid git dependency.' % dep_name)
raise ValueError(
'%s is not a valid git dependency.' % dep_name)
......@@ -165,10 +165,10 @@ class SCMWrapper(object):
if actual_remote_url:
return (gclient_utils.SplitUrlRevision(actual_remote_url)[0].rstrip('/')
== gclient_utils.SplitUrlRevision(self.url)[0].rstrip('/'))
else:
# This may occur if the self.checkout_path exists but does not contain a
# valid git checkout.
return False
# This may occur if the self.checkout_path exists but does not contain a
# valid git checkout.
return False
def _DeleteOrMove(self, force):
"""Delete the checkout directory or move it out of the way.
......@@ -381,7 +381,8 @@ class GitWrapper(SCMWrapper):
if not target_rev:
raise gclient_utils.Error('A target revision for the patch must be given')
elif target_rev.startswith(('refs/heads/', 'refs/branch-heads')):
if target_rev.startswith(('refs/heads/', 'refs/branch-heads')):
# If |target_rev| is in refs/heads/** or refs/branch-heads/**, try first
# to find the corresponding remote ref for it, since |target_rev| might
# point to a local ref which is not up to date with the corresponding
......@@ -781,16 +782,18 @@ class GitWrapper(SCMWrapper):
printed_path=printed_path, merge=False)
printed_path = True
break
elif re.match(r'quit|q', action, re.I):
if re.match(r'quit|q', action, re.I):
raise gclient_utils.Error("Can't fast-forward, please merge or "
"rebase manually.\n"
"cd %s && git " % self.checkout_path
+ "rebase %s" % upstream_branch)
elif re.match(r'skip|s', action, re.I):
if re.match(r'skip|s', action, re.I):
self.Print('Skipping %s' % self.relpath)
return
else:
self.Print('Input not recognized')
self.Print('Input not recognized')
elif re.match(b"error: Your local changes to '.*' would be "
b"overwritten by merge. Aborting.\nPlease, commit your "
b"changes or stash them before you can merge.\n",
......@@ -1137,16 +1140,18 @@ class GitWrapper(SCMWrapper):
# Should this be recursive?
rebase_output = scm.GIT.Capture(rebase_cmd, cwd=self.checkout_path)
break
elif re.match(r'quit|q', rebase_action, re.I):
if re.match(r'quit|q', rebase_action, re.I):
raise gclient_utils.Error("Please merge or rebase manually\n"
"cd %s && git " % self.checkout_path
+ "%s" % ' '.join(rebase_cmd))
elif re.match(r'show|s', rebase_action, re.I):
if re.match(r'show|s', rebase_action, re.I):
self.Print('%s' % e.stderr.decode('utf-8').strip())
continue
else:
gclient_utils.Error("Input not recognized")
continue
gclient_utils.Error("Input not recognized")
continue
elif re.search(br'^CONFLICT', e.stdout, re.M):
raise gclient_utils.Error("Conflict while rebasing this branch.\n"
"Fix the conflict and run gclient again.\n"
......@@ -1559,15 +1564,12 @@ class CipdWrapper(SCMWrapper):
CIPD packages should be reverted at the root by running
`CipdRoot.run('revert')`.
"""
pass
def diff(self, options, args, file_list):
"""CIPD has no notion of diffing."""
pass
def pack(self, options, args, file_list):
"""CIPD has no notion of diffing."""
pass
def revinfo(self, options, args, file_list):
"""Grab the instance ID."""
......@@ -1597,4 +1599,3 @@ class CipdWrapper(SCMWrapper):
CIPD packages should be updated at the root by running
`CipdRoot.run('update')`.
"""
pass
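
The pass statements removed above correspond to pylint's W0107 (unnecessary-pass): a method whose body already consists of a docstring does not need a trailing pass. A minimal sketch with a hypothetical class:

    class NullWrapper(object):
        def diff(self, options, args, file_list):
            """This SCM has no notion of diffing."""
            # No pass needed: the docstring alone is a complete body.
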
......@@ -301,8 +301,8 @@ def rmtree(path):
exitcode = subprocess.call(['cmd.exe', '/c', 'rd', '/q', '/s', path])
if exitcode == 0:
return
else:
print('rd exited with code %d' % exitcode, file=sys.stderr)
print('rd exited with code %d' % exitcode, file=sys.stderr)
time.sleep(3)
raise Exception('Failed to remove path %s' % path)
......@@ -437,11 +437,12 @@ class Annotated(Wrapper):
lf_loc = obj[0].find(b'\n')
if cr_loc == lf_loc == -1:
break
elif cr_loc == -1 or (lf_loc >= 0 and lf_loc < cr_loc):
if cr_loc == -1 or (0 <= lf_loc < cr_loc):
line, remaining = obj[0].split(b'\n', 1)
if line:
self._wrapped_write(b'%d>%s\n' % (index, line))
elif lf_loc == -1 or (cr_loc >= 0 and cr_loc < lf_loc):
elif lf_loc == -1 or (0 <= cr_loc < lf_loc):
line, remaining = obj[0].split(b'\r', 1)
if line:
self._wrapped_write(b'%d>%s\r' % (index, line))
......@@ -750,12 +751,16 @@ def GetMacWinAixOrLinux():
"""Returns 'mac', 'win', or 'linux', matching the current platform."""
if sys.platform.startswith(('cygwin', 'win')):
return 'win'
elif sys.platform.startswith('linux'):
if sys.platform.startswith('linux'):
return 'linux'
elif sys.platform == 'darwin':
if sys.platform == 'darwin':
return 'mac'
elif sys.platform.startswith('aix'):
if sys.platform.startswith('aix'):
return 'aix'
raise Error('Unknown platform: ' + sys.platform)
......@@ -806,7 +811,6 @@ class WorkItem(object):
def run(self, work_queue):
"""work_queue is passed as keyword argument so it should be
the last parameters of the function when you override it."""
pass
@property
def name(self):
......@@ -1211,8 +1215,8 @@ def DefaultDeltaBaseCacheLimit():
"""
if platform.architecture()[0].startswith('64'):
return '2g'
else:
return '512m'
return '512m'
def DefaultIndexPackConfig(url=''):
......@@ -1259,13 +1263,15 @@ def freeze(obj):
"""
if isinstance(obj, collections_abc.Mapping):
return FrozenDict((freeze(k), freeze(v)) for k, v in obj.items())
elif isinstance(obj, (list, tuple)):
if isinstance(obj, (list, tuple)):
return tuple(freeze(i) for i in obj)
elif isinstance(obj, set):
if isinstance(obj, set):
return frozenset(freeze(i) for i in obj)
else:
hash(obj)
return obj
hash(obj)
return obj
class FrozenDict(collections_abc.Mapping):
......
......@@ -254,8 +254,8 @@ class CookiesAuthenticator(Authenticator):
if a[0]:
secret = base64.b64encode(('%s:%s' % (a[0], a[2])).encode('utf-8'))
return 'Basic %s' % secret.decode('utf-8')
else:
return 'Bearer %s' % a[2]
return 'Bearer %s' % a[2]
return None
def get_auth_email(self, host):
......@@ -696,7 +696,8 @@ def GetChangeReview(host, change, revision=None):
jmsg = GetChangeRevisions(host, change)
if not jmsg:
return None
elif len(jmsg) > 1:
if len(jmsg) > 1:
raise GerritError(200, 'Multiple changes found for ChangeId %s.' % change)
revision = jmsg[0]['current_revision']
path = 'changes/%s/revisions/%s/review'
......@@ -981,7 +982,8 @@ def ResetReviewLabels(host, change, label, value='0', message=None,
if not jmsg:
raise GerritError(
200, 'Could not get review information for change "%s"' % change)
elif jmsg[0]['current_revision'] != revision:
if jmsg[0]['current_revision'] != revision:
raise GerritError(200, 'While resetting labels on change "%s", '
'a new patchset was uploaded.' % change)
......@@ -1000,7 +1002,7 @@ def CreateChange(host, project, branch='main', subject='', params=()):
"""
path = 'changes/'
body = {'project': project, 'branch': branch, 'subject': subject}
body.update({k: v for k, v in params})
body.update(dict(params))
for key in 'project', 'branch', 'subject':
if not body[key]:
raise GerritError(200, '%s is required' % key.title())
......
......@@ -116,7 +116,7 @@ class Mirror(object):
def __init__(self, url, refs=None, commits=None, print_func=None):
self.url = url
self.fetch_specs = set([self.parse_fetch_spec(ref) for ref in (refs or [])])
self.fetch_specs = {self.parse_fetch_spec(ref) for ref in (refs or [])}
self.fetch_commits = set(commits or [])
self.basedir = self.UrlToCacheDir(url)
self.mirror_path = os.path.join(self.GetCachePath(), self.basedir)
......@@ -576,8 +576,7 @@ class Mirror(object):
if not prev_dest_prefix:
return
for path in ls_out_set:
if (path == prev_dest_prefix + '/' or
path == prev_dest_prefix + '.ready'):
if path in (prev_dest_prefix + '/', prev_dest_prefix + '.ready'):
continue
if path.endswith('.ready'):
gsutil.call('rm', path)
......
......@@ -638,7 +638,7 @@ def _FindYapfConfigFile(fpath, yapf_config_cache, top_dir=None):
yapf_file = os.path.join(fpath, YAPF_CONFIG_FILENAME)
if os.path.isfile(yapf_file):
ret = yapf_file
elif fpath == top_dir or parent_dir == fpath:
elif fpath in (top_dir, parent_dir):
# If we're at the top level directory, or if we're at root
# there is no provided style.
ret = None
......@@ -1720,18 +1720,18 @@ class Changelist(object):
if not force:
confirm_or_exit('If you know what you are doing', action='continue')
return
else:
missing = (
([] if gerrit_auth else [self._gerrit_host]) +
([] if git_auth else [git_host]))
DieWithError('Credentials for the following hosts are required:\n'
' %s\n'
'These are read from %s (or legacy %s)\n'
'%s' % (
'\n '.join(missing),
cookie_auth.get_gitcookies_path(),
cookie_auth.get_netrc_path(),
cookie_auth.get_new_password_message(git_host)))
missing = (
([] if gerrit_auth else [self._gerrit_host]) +
([] if git_auth else [git_host]))
DieWithError('Credentials for the following hosts are required:\n'
' %s\n'
'These are read from %s (or legacy %s)\n'
'%s' % (
'\n '.join(missing),
cookie_auth.get_gitcookies_path(),
cookie_auth.get_netrc_path(),
cookie_auth.get_new_password_message(git_host)))
def EnsureCanUploadPatchset(self, force):
if not self.GetIssue():
......@@ -1820,9 +1820,9 @@ class Changelist(object):
if m.get('author', {}).get('_account_id') == owner:
# Most recent message was by owner.
return 'waiting'
else:
# Some reply from non-owner.
return 'reply'
# Some reply from non-owner.
return 'reply'
# Somehow there are no messages even though there are reviewers.
return 'unsent'
......@@ -1845,9 +1845,9 @@ class Changelist(object):
data = self._GetChangeDetail(['ALL_REVISIONS'])
patchset = data['revisions'][data['current_revision']]['_number']
dry_run = set([int(m['_revision_number'])
for m in data.get('messages', [])
if m.get('tag', '').endswith('dry-run')])
dry_run = {int(m['_revision_number'])
for m in data.get('messages', [])
if m.get('tag', '').endswith('dry-run')}
for revision_info in sorted(data.get('revisions', {}).values(),
key=lambda c: c['_number'], reverse=True):
......@@ -2634,8 +2634,8 @@ class Changelist(object):
if git_footers.get_footer_change_id(new_log_desc):
print('git-cl: Added Change-Id to commit message.')
return new_log_desc
else:
DieWithError('ERROR: Gerrit commit-msg hook not installed.')
DieWithError('ERROR: Gerrit commit-msg hook not installed.')
def CannotTriggerTryJobReason(self):
try:
......@@ -3341,10 +3341,10 @@ def CMDbaseurl(parser, args):
print('Current base-url:')
return RunGit(['config', 'branch.%s.base-url' % branch],
error_ok=False).strip()
else:
print('Setting base-url to %s' % args[0])
return RunGit(['config', 'branch.%s.base-url' % branch, args[0]],
error_ok=False).strip()
print('Setting base-url to %s' % args[0])
return RunGit(['config', 'branch.%s.base-url' % branch, args[0]],
error_ok=False).strip()
def color_for_status(status):
......@@ -3605,13 +3605,15 @@ def CMDarchive(parser, args):
if options.dry_run:
print('\nNo changes were made (dry run).\n')
return 0
elif any(branch == current_branch for branch, _ in proposal):
if any(branch == current_branch for branch, _ in proposal):
print('You are currently on a branch \'%s\' which is associated with a '
'closed codereview issue, so archive cannot proceed. Please '
'checkout another branch and run this command again.' %
current_branch)
return 1
elif not options.force:
if not options.force:
answer = gclient_utils.AskForData('\nProceed with deletion (Y/n)? ').lower()
if answer not in ('y', ''):
print('Aborted.')
......@@ -4148,7 +4150,9 @@ def GetTargetRef(remote, remote_branch, target_branch):
if not match:
# This is a branch path but not one we recognize; use as-is.
remote_branch = target_branch
# pylint: disable=consider-using-get
elif remote_branch in REFS_THAT_ALIAS_TO_OTHER_REFS:
# pylint: enable=consider-using-get
# Handle the refs that need to land in different refs.
remote_branch = REFS_THAT_ALIAS_TO_OTHER_REFS[remote_branch]
......@@ -4565,8 +4569,10 @@ def GetTreeStatus(url=None):
status = str(urllib.request.urlopen(url).read().lower())
if status.find('closed') != -1 or status == '0':
return 'closed'
elif status.find('open') != -1 or status == '1':
if status.find('open') != -1 or status == '1':
return 'open'
return 'unknown'
return 'unset'
......@@ -5102,8 +5108,8 @@ def _RunRustFmt(opts, rust_diff_files, top_dir, upstream_commit):
if opts.presubmit and rustfmt_exitcode != 0:
return 2
else:
return 0
return 0
def MatchingFileType(file_name, extensions):
......
......@@ -46,6 +46,7 @@ from io import BytesIO
if sys.version_info.major == 2:
# On Python 3, BrokenPipeError is raised instead.
# pylint:disable=redefined-builtin
BrokenPipeError = IOError
......@@ -874,7 +875,7 @@ def status():
while c != b'':
c = stream.read(1)
if c in (None, b'', b'\0'):
if len(acc.getvalue()):
if len(acc.getvalue()) > 0:
yield acc.getvalue()
acc = BytesIO()
else:
......
......@@ -71,7 +71,8 @@ def split_footers(message):
for line in reversed(message_lines):
if line == '' or line.isspace():
break
elif parse_footer(line):
if parse_footer(line):
footer_lines.extend(maybe_footer_lines)
maybe_footer_lines = []
footer_lines.append(line)
......@@ -182,8 +183,8 @@ def get_unique(footers, key):
assert len(values) <= 1, 'Multiple %s footers' % key
if values:
return values[0]
else:
return None
return None
def get_position(footers):
......
......@@ -31,6 +31,7 @@ from third_party import colorama
if sys.version_info.major == 2:
# On Python 3, BrokenPipeError is raised instead.
# pylint:disable=redefined-builtin
BrokenPipeError = IOError
......@@ -68,7 +69,7 @@ def _print_help(outbuf):
def _color_branch(branch, all_branches, all_tags, current):
if branch == current or branch == 'HEAD -> ' + current:
if branch in (current, 'HEAD -> ' + current):
color = CYAN
current = None
elif branch in all_branches:
......
......@@ -41,7 +41,8 @@ def main(args):
if not downstreams:
print("No downstream branches")
return 1
elif len(downstreams) == 1:
if len(downstreams) == 1:
run('checkout', downstreams[0], stdout=sys.stdout, stderr=sys.stderr)
else:
high = len(downstreams) - 1
......
......@@ -62,8 +62,8 @@ def pathlify(hash_prefix):
"""
if sys.version_info.major == 3:
return '/'.join('%02x' % b for b in hash_prefix)
else:
return '/'.join('%02x' % ord(b) for b in hash_prefix)
return '/'.join('%02x' % ord(b) for b in hash_prefix)
@git.memoize_one(threadsafe=False)
......
......@@ -66,8 +66,7 @@ def main(args):
print(
'gn.py: Could not find gn executable at: %s' % gn_path, file=sys.stderr)
return 2
else:
return subprocess.call([gn_path] + args[1:])
return subprocess.call([gn_path] + args[1:])
if __name__ == '__main__':
......
......@@ -54,7 +54,8 @@ def get_notice_footer():
def get_change_notice(version):
if version == 0:
return [] # No changes for version 0
elif version == 1:
if version == 1:
return [
'We want to collect the Git version.',
'We want to collect information about the HTTP',
......@@ -64,7 +65,8 @@ def get_change_notice(version):
'We only collect known strings to make sure we',
'don\'t record PII.',
]
elif version == 2:
if version == 2:
return [
'We will start collecting metrics from bots.',
'There are no changes for developers.',
......
......@@ -68,7 +68,7 @@ try:
from dateutil.relativedelta import relativedelta
except ImportError:
logging.error('python-dateutil package required')
exit(1)
sys.exit(1)
class DefaultFormatter(Formatter):
......@@ -76,10 +76,11 @@ class DefaultFormatter(Formatter):
super(DefaultFormatter, self).__init__()
self.default = default
def get_value(self, key, args, kwds):
if isinstance(key, str) and key not in kwds:
def get_value(self, key, args, kwargs):
if isinstance(key, str) and key not in kwargs:
return self.default
return Formatter.get_value(self, key, args, kwds)
return Formatter.get_value(self, key, args, kwargs)
gerrit_instances = [
{
......@@ -168,9 +169,10 @@ def get_week_of(date):
def get_yes_or_no(msg):
while True:
response = gclient_utils.AskForData(msg + ' yes/no [no] ')
if response == 'y' or response == 'yes':
if response in ('y', 'yes'):
return True
elif not response or response == 'n' or response == 'no':
if not response or response in ('n', 'no'):
return False
......@@ -410,7 +412,7 @@ class MyActivity(object):
return [
issue for issue in issues
if issue['author'] == user_str or issue['owner'] == user_str]
if user_str in (issue['author'], issue['owner'])]
def monorail_get_issues(self, project, issue_ids):
return self.monorail_query_issues(project, {
......@@ -531,7 +533,7 @@ class MyActivity(object):
return True
def filter_modified(self, modified):
return self.modified_after < modified and modified < self.modified_before
return self.modified_after < modified < self.modified_before
def auth_for_changes(self):
#TODO(cjhopman): Move authentication check for getting changes here.
......@@ -686,12 +688,12 @@ class MyActivity(object):
return output
class PythonObjectEncoder(json.JSONEncoder):
def default(self, obj): # pylint: disable=method-hidden
if isinstance(obj, datetime):
return obj.isoformat()
if isinstance(obj, set):
return list(obj)
return json.JSONEncoder.default(self, obj)
def default(self, o): # pylint: disable=method-hidden
if isinstance(o, datetime):
return o.isoformat()
if isinstance(o, set):
return list(o)
return json.JSONEncoder.default(self, o)
output = {
'reviews': format_for_json_dump(self.reviews),
......@@ -918,16 +920,16 @@ def main():
config = json.load(f)
for item, entries in config.items():
if item == 'gerrit_instances':
for repo, dic in entries.items():
# Use property name as URL
dic['url'] = repo
gerrit_instances.append(dic)
elif item == 'monorail_projects':
monorail_projects.append(entries)
else:
logging.error('Invalid entry in config file.')
return 1
if item == 'gerrit_instances':
for repo, dic in entries.items():
# Use property name as URL
dic['url'] = repo
gerrit_instances.append(dic)
elif item == 'monorail_projects':
monorail_projects.append(entries)
else:
logging.error('Invalid entry in config file.')
return 1
my_activity = MyActivity(options)
my_activity.show_progress('Loading data')
......
......@@ -95,31 +95,39 @@ class OwnersFinder(object):
while True:
inp = self.input_command(owner)
if inp == 'y' or inp == 'yes':
if inp in ('y', 'yes'):
self.select_owner(owner)
break
elif inp == 'n' or inp == 'no':
if inp in ('n', 'no'):
self.deselect_owner(owner)
break
elif inp == '' or inp == 'd' or inp == 'defer':
if inp in ('', 'd', 'defer'):
self.owners_queue.append(self.owners_queue.pop(0))
break
elif inp == 'f' or inp == 'files':
if inp in ('f', 'files'):
self.list_files()
break
elif inp == 'o' or inp == 'owners':
if inp in ('o', 'owners'):
self.list_owners(self.owners_queue)
break
elif inp == 'p' or inp == 'pick':
if inp in ('p', 'pick'):
self.pick_owner(gclient_utils.AskForData('Pick an owner: '))
break
elif inp.startswith('p ') or inp.startswith('pick '):
if inp.startswith('p ') or inp.startswith('pick '):
self.pick_owner(inp.split(' ', 2)[1].strip())
break
elif inp == 'r' or inp == 'restart':
if inp in ('r', 'restart'):
self.reset()
break
elif inp == 'q' or inp == 'quit':
if inp in ('q', 'quit'):
# Exit with error
return 1
......@@ -268,11 +276,13 @@ class OwnersFinder(object):
self.writeln('You cannot pick ' + self.bold_name(ow) + ' manually. ' +
'It\'s an invalid name or not related to the change list.')
return False
elif ow in self.selected_owners:
if ow in self.selected_owners:
self.writeln('You cannot pick ' + self.bold_name(ow) + ' manually. ' +
'It\'s already selected.')
return False
elif ow in self.deselected_owners:
if ow in self.deselected_owners:
self.writeln('You cannot pick ' + self.bold_name(ow) + ' manually.' +
'It\'s already unselected.')
return False
......
......@@ -70,9 +70,9 @@ def CheckChangeHasBugField(input_api, output_api):
'Buganizer bugs should be prefixed with b:, not b/.')
]
return []
else:
return [output_api.PresubmitNotifyResult(
'If this change has an associated bug, add Bug: [bug number].')]
return [output_api.PresubmitNotifyResult(
'If this change has an associated bug, add Bug: [bug number].')]
def CheckChangeHasNoUnwantedTags(input_api, output_api):
UNWANTED_TAGS = {
......@@ -94,12 +94,13 @@ def CheckChangeHasNoUnwantedTags(input_api, output_api):
def CheckDoNotSubmitInDescription(input_api, output_api):
"""Checks that the user didn't add 'DO NOT ''SUBMIT' to the CL description.
"""
keyword = 'DO NOT ''SUBMIT'
# Keyword is concatenated to avoid presubmit check rejecting the CL.
keyword = 'DO NOT ' + 'SUBMIT'
if keyword in input_api.change.DescriptionText():
return [output_api.PresubmitError(
keyword + ' is present in the changelist description.')]
else:
return []
return []
def CheckChangeHasDescription(input_api, output_api):
......@@ -108,8 +109,8 @@ def CheckChangeHasDescription(input_api, output_api):
if text.strip() == '':
if input_api.is_committing:
return [output_api.PresubmitError('Add a description to the CL.')]
else:
return [output_api.PresubmitNotifyResult('Add a description to the CL.')]
return [output_api.PresubmitNotifyResult('Add a description to the CL.')]
return []
......@@ -188,7 +189,9 @@ def CheckDoNotSubmitInFiles(input_api, output_api):
"""Checks that the user didn't add 'DO NOT ''SUBMIT' to any files."""
# We want to check every text file, not just source files.
file_filter = lambda x : x
keyword = 'DO NOT ''SUBMIT'
# Keyword is concatenated to avoid presubmit check rejecting the CL.
keyword = 'DO NOT ' + 'SUBMIT'
def DoNotSubmitRule(extension, line):
try:
return keyword not in line
......@@ -315,7 +318,7 @@ def CheckGenderNeutral(input_api, output_api, source_file_filter=None):
if gendered_re.search(line):
errors.append('%s (%d): %s' % (f.LocalPath(), line_num, line))
if len(errors):
if errors:
return [output_api.PresubmitPromptWarning('Found a gendered pronoun in:',
long_text='\n'.join(errors))]
return []
......@@ -526,7 +529,7 @@ def CheckLongLines(input_api, output_api, maxlen, source_file_filter=None):
"""True iff the pylint directive starting at line[pos] is global."""
# Any character before |pos| that is not whitespace or '#' indidcates
# this is a local directive.
return not any([c not in " \t#" for c in line[:pos]])
return not any(c not in " \t#" for c in line[:pos])
def check_python_long_lines(affected_files, error_formatter):
errors = []
......@@ -583,8 +586,8 @@ def CheckLongLines(input_api, output_api, maxlen, source_file_filter=None):
if errors:
msg = 'Found lines longer than %s characters (first 5 shown).' % maxlen
return [output_api.PresubmitPromptWarning(msg, items=errors[:5])]
else:
return []
return []
def CheckLicense(input_api, output_api, license_re=None, project_name=None,
......@@ -960,7 +963,7 @@ def GetPylint(input_api,
extra_paths_list = extra_paths_list or []
assert version in ('1.5', '2.6', '2.7'), \
'Unsupported pylint version: ' + version
'Unsupported pylint version: %s' % version
python2 = (version == '1.5')
if input_api.is_committing:
......@@ -1069,11 +1072,10 @@ def GetPylint(input_api,
GetPylintCmd(files, ["--disable=cyclic-import"], True),
GetPylintCmd(files, ["--disable=all", "--enable=cyclic-import"], False)
]
else:
return [ GetPylintCmd(files, [], True) ]
else:
return map(lambda x: GetPylintCmd([x], [], 1), files)
return [ GetPylintCmd(files, [], True) ]
return map(lambda x: GetPylintCmd([x], [], 1), files)
def RunPylint(input_api, *args, **kwargs):
......@@ -1090,11 +1092,11 @@ def CheckDirMetadataFormat(input_api, output_api, dirmd_bin=None):
# complete.
file_filter = lambda f: (
input_api.basename(f.LocalPath()) in ('DIR_METADATA', 'OWNERS'))
affected_files = set([
affected_files = {
f.AbsoluteLocalPath()
for f in input_api.change.AffectedFiles(
include_deletes=False, file_filter=file_filter)
])
}
if not affected_files:
return []
......@@ -1145,11 +1147,11 @@ def CheckOwnersDirMetadataExclusive(input_api, output_api):
input_api.re.MULTILINE)
file_filter = (
lambda f: input_api.basename(f.LocalPath()) in ('OWNERS', 'DIR_METADATA'))
affected_dirs = set([
affected_dirs = {
input_api.os_path.dirname(f.AbsoluteLocalPath())
for f in input_api.change.AffectedFiles(
include_deletes=False, file_filter=file_filter)
])
}
errors = []
for path in affected_dirs:
......@@ -1173,11 +1175,11 @@ def CheckOwnersDirMetadataExclusive(input_api, output_api):
def CheckOwnersFormat(input_api, output_api):
if input_api.gerrit and input_api.gerrit.IsCodeOwnersEnabledOnRepo():
return []
affected_files = set([
affected_files = {
f.LocalPath()
for f in input_api.change.AffectedFiles()
if 'OWNERS' in f.LocalPath() and f.Action() != 'D'
])
}
if not affected_files:
return []
try:
......@@ -1202,8 +1204,9 @@ def CheckOwners(
if input_api.gerrit and input_api.gerrit.IsCodeOwnersEnabledOnRepo():
return []
affected_files = set([f.LocalPath() for f in
input_api.change.AffectedFiles(file_filter=source_file_filter)])
affected_files = {f.LocalPath() for f in
input_api.change.AffectedFiles(
file_filter=source_file_filter)}
owner_email, reviewers = GetCodereviewOwnerAndReviewers(
input_api, approval_needed=input_api.is_committing)
......@@ -1735,7 +1738,7 @@ def CheckChangedLUCIConfigs(input_api, output_api):
sev = msg['severity']
if sev == 'WARNING':
out_f = output_api.PresubmitPromptWarning
elif sev == 'ERROR' or sev == 'CRITICAL':
elif sev in ('ERROR', 'CRITICAL'):
out_f = output_api.PresubmitError
else:
out_f = output_api.PresubmitNotifyResult
......
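
Several hunks above rewrite set([...]) and dict([...]) wrappers around comprehensions as set and dict comprehensions, the form suggested by pylint's R1718 (consider-using-set-comprehension) and R1717 (consider-using-dict-comprehension). A small sketch with hypothetical data:

    files = ['a/OWNERS', 'b/DIR_METADATA', 'a/OWNERS']

    # Flagged: a comprehension wrapped in a set() call builds an intermediate list.
    dirs = set([f.split('/')[0] for f in files])

    # Preferred: a set comprehension, same result without the extra list.
    dirs = {f.split('/')[0] for f in files}
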
......@@ -124,7 +124,7 @@ class MockInputApi(object):
if file_.LocalPath() == filename:
return '\n'.join(file_.NewContents())
# Otherwise, file is not in our mock API.
raise IOError, "No such file or directory: '%s'" % filename
raise IOError("No such file or directory: '%s'" % filename)
class MockOutputApi(object):
......
......@@ -330,9 +330,11 @@ class _PresubmitResult(object):
"""
if isinstance(val, str):
return val
if six.PY2 and isinstance(val, unicode):
return val.encode()
elif six.PY3 and isinstance(val, bytes):
if six.PY3 and isinstance(val, bytes):
return val.decode()
raise ValueError("Unknown string type %s" % type(val))
......@@ -379,7 +381,6 @@ class _PresubmitPromptWarning(_PresubmitResult):
# Public access through OutputApi object.
class _PresubmitNotifyResult(_PresubmitResult):
"""Just print something to the screen -- but it's not even a warning."""
pass
# Top level object so multiprocessing can pickle
......@@ -1282,7 +1283,7 @@ class Change(object):
def owners_file_filter(f):
return 'OWNERS' in os.path.split(f.LocalPath())[1]
files = self.AffectedFiles(file_filter=owners_file_filter)
return dict([(f.LocalPath(), f.OldContents()) for f in files])
return {f.LocalPath(): f.OldContents() for f in files}
class GitChange(Change):
......@@ -1313,7 +1314,7 @@ def ListRelevantPresubmitFiles(files, root):
files = [normpath(os.path.join(root, f)) for f in files]
# List all the individual directories containing files.
directories = set([os.path.dirname(f) for f in files])
directories = {os.path.dirname(f) for f in files}
# Ignore root if inherit-review-settings-ok is present.
if os.path.isfile(os.path.join(root, 'inherit-review-settings-ok')):
......@@ -1746,7 +1747,7 @@ def DoPresubmitChecks(change,
global _ASKED_FOR_FEEDBACK
# Ask for feedback one time out of 5.
if (len(results) and random.randint(0, 4) == 0 and not _ASKED_FOR_FEEDBACK):
if (results and random.randint(0, 4) == 0 and not _ASKED_FOR_FEEDBACK):
sys.stdout.write(
'Was the presubmit check useful? If not, run "git cl presubmit -v"\n'
'to figure out which PRESUBMIT.py was run, then run git blame\n'
......
......@@ -71,23 +71,23 @@ def determine_scm(root):
"""
if os.path.isdir(os.path.join(root, '.git')):
return 'git'
else:
try:
subprocess2.check_call(
['git', 'rev-parse', '--show-cdup'],
stdout=subprocess2.DEVNULL,
stderr=subprocess2.DEVNULL,
cwd=root)
return 'git'
except (OSError, subprocess2.CalledProcessError):
return None
try:
subprocess2.check_call(
['git', 'rev-parse', '--show-cdup'],
stdout=subprocess2.DEVNULL,
stderr=subprocess2.DEVNULL,
cwd=root)
return 'git'
except (OSError, subprocess2.CalledProcessError):
return None
def only_int(val):
if val.isdigit():
return int(val)
else:
return 0
return 0
class GIT(object):
......@@ -261,7 +261,8 @@ class GIT(object):
if 'origin/main' in remote_branches:
# Fall back on origin/main if it exits.
return 'origin', 'refs/heads/main'
elif 'origin/master' in remote_branches:
if 'origin/master' in remote_branches:
# Fall back on origin/master if it exits.
return 'origin', 'refs/heads/master'
......
......@@ -147,11 +147,11 @@ class CommandDispatcher(object):
reverse=True)
if (hamming_commands[0][0] - hamming_commands[1][0]) < 0.3:
# Too ambiguous.
return
return None
if hamming_commands[0][0] < 0.8:
# Not similar enough. Don't be a fool and run a random command.
return
return None
return commands[hamming_commands[0][1]]
......
......@@ -49,7 +49,7 @@ def read_tree(tree_root):
dirs.remove(d)
for f in [join(root, f) for f in files if not f.startswith('.')]:
filepath = f[len(tree_root) + 1:].replace(os.sep, '/')
assert len(filepath), f
assert len(filepath) > 0, f
with io.open(join(root, f), encoding='utf-8') as f:
tree[filepath] = f.read()
return tree
......
......@@ -9,8 +9,6 @@ import os
import sys
import unittest
#import test_env # pylint: disable=relative-import,unused-import
sys.path.insert(0, os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
'recipes', 'recipe_modules', 'bot_update', 'resources'))
......
......@@ -59,8 +59,8 @@ class GsutilMock(object):
if fn:
fn()
return code
else:
return 0
return 0
def check_call(self, *args):
with self.lock:
......@@ -70,8 +70,8 @@ class GsutilMock(object):
if fn:
fn()
return code, out, err
else:
return (0, '', '')
return (0, '', '')
def check_call_with_retries(self, *args):
return self.check_call(*args)
......
......@@ -165,10 +165,10 @@ from :3
return self.OptionsObject(*args, **kwargs)
def checkstdout(self, expected):
# pylint: disable=no-member
value = sys.stdout.getvalue()
sys.stdout.close()
# Check that the expected output appears.
# pylint: disable=no-member
self.assertIn(expected, strip_timestamps(value))
@staticmethod
......@@ -597,10 +597,10 @@ class ManagedGitWrapperTestCaseMock(unittest.TestCase):
return self.OptionsObject(*args, **kwargs)
def checkstdout(self, expected):
# pylint: disable=no-member
value = sys.stdout.getvalue()
sys.stdout.close()
# Check that the expected output appears.
# pylint: disable=no-member
self.assertIn(expected, strip_timestamps(value))
def setUp(self):
......@@ -711,15 +711,15 @@ class ManagedGitWrapperTestCaseMock(unittest.TestCase):
class UnmanagedGitWrapperTestCase(BaseGitWrapperTestCase):
def checkInStdout(self, expected):
# pylint: disable=no-member
value = sys.stdout.getvalue()
sys.stdout.close()
# pylint: disable=no-member
self.assertIn(expected, value)
def checkNotInStdout(self, expected):
# pylint: disable=no-member
value = sys.stdout.getvalue()
sys.stdout.close()
# pylint: disable=no-member
self.assertNotIn(expected, value)
def getCurrentBranch(self):
......
......@@ -143,5 +143,3 @@ class GClientSmokeBase(fake_repos.FakeReposTestBase):
self.checkString(results[i][0][2], path, (i, results[i][0][2], path))
self.assertEqual(len(results), len(items), (stdout, items, len(results)))
return results
......@@ -42,9 +42,11 @@ class TestGitFindReleases(unittest.TestCase):
assert len(args) > 1
if args[0] == 'name-rev' and args[1] == '--tags':
return 'undefined'
elif args[0] == 'name-rev' and args[1] == '--refs':
if args[0] == 'name-rev' and args[1] == '--refs':
return '1.0.0'
elif args[0] == 'log':
if args[0] == 'log':
return ''
assert False, "Unexpected arguments for git.run"
......
......@@ -53,11 +53,14 @@ class CMDFormatTestCase(unittest.TestCase):
def RunMock(*args):
if args[0] == 'remote':
return 'origin\ngerrit'
elif args[0] == 'fetch':
if args[0] == 'fetch':
return
elif args[0] == 'branch':
if args[0] == 'branch':
return
elif args[0] == 'config':
if args[0] == 'config':
return
raise Exception('Did not expect such run git command: %s' % args[0])
......
......@@ -1261,7 +1261,7 @@ class InputApiUnittest(PresubmitTestsBase):
# Ignores weird because of check_list, third_party because of skip_list,
# binary isn't a text file and being deleted doesn't exist. The rest is
# outside foo/.
rhs_lines = [x for x in input_api.RightHandSideLines(None)]
rhs_lines = list(input_api.RightHandSideLines(None))
self.assertEqual(len(rhs_lines), 14)
self.assertEqual(rhs_lines[0][0].LocalPath(),
presubmit.normpath(files[0][1]))
......@@ -2282,8 +2282,8 @@ the current line as well!
"print('foo')\n"
)
license_text = (
r".*? Copyright \(c\) 2037 Nobody." "\n"
r".*? All Rights Reserved\." "\n"
r".*? Copyright \(c\) 2037 Nobody.\n"
r".*? All Rights Reserved\.\n"
)
self._LicenseCheck(text, license_text, True, None)
......@@ -2295,8 +2295,8 @@ the current line as well!
"print('foo')\n"
)
license_text = (
r".*? Copyright \(c\) 0007 Nobody." "\n"
r".*? All Rights Reserved\." "\n"
r".*? Copyright \(c\) 0007 Nobody.\n"
r".*? All Rights Reserved\.\n"
)
self._LicenseCheck(text, license_text, True,
presubmit.OutputApi.PresubmitPromptWarning)
......@@ -2309,8 +2309,8 @@ the current line as well!
"print('foo')\n"
)
license_text = (
r".*? Copyright \(c\) 0007 Nobody." "\n"
r".*? All Rights Reserved\." "\n"
r".*? Copyright \(c\) 0007 Nobody.\n"
r".*? All Rights Reserved\.\n"
)
self._LicenseCheck(text, license_text, False,
presubmit.OutputApi.PresubmitPromptWarning)
......@@ -2318,8 +2318,8 @@ the current line as well!
def testCheckLicenseEmptySuccess(self):
text = ''
license_text = (
r".*? Copyright \(c\) 2037 Nobody." "\n"
r".*? All Rights Reserved\." "\n"
r".*? Copyright \(c\) 2037 Nobody.\n"
r".*? All Rights Reserved\.\n"
)
self._LicenseCheck(text, license_text, True, None, accept_empty_files=True)
......@@ -2449,14 +2449,14 @@ the current line as well!
subprocess.Popen.return_value = process
presubmit.sigint_handler.wait.return_value = (b'', None)
pylint = os.path.join(_ROOT, 'pylint-1.5')
pylint = os.path.join(_ROOT, 'pylint-2.7')
pylintrc = os.path.join(_ROOT, 'pylintrc')
env = {str('PYTHONPATH'): str('')}
if sys.platform == 'win32':
pylint += '.bat'
results = presubmit_canned_checks.RunPylint(
input_api, presubmit.OutputApi)
input_api, presubmit.OutputApi, version='2.7')
self.assertEqual([], results)
self.assertEqual(subprocess.Popen.mock_calls, [
......
......@@ -141,6 +141,7 @@ class SmokeTests(unittest.TestCase):
def test_check_output_no_stdout(self):
for subp in (subprocess, subprocess2):
with self.assertRaises(ValueError):
# pylint: disable=unexpected-keyword-arg
subp.check_output(TEST_COMMAND, stdout=subp.PIPE)
def test_print_exception(self):
......
......@@ -135,10 +135,10 @@ def get_targets(args, parser, use_null_terminator):
# Take stdin as a newline or null separated list of files.
if use_null_terminator:
return sys.stdin.read().split('\0')
else:
return sys.stdin.read().splitlines()
else:
return args
return sys.stdin.read().splitlines()
return args
def upload_to_google_storage(
......