Commit 8e57b4bc authored by Marc-Antoine Ruel, committed by Commit Bot

python3 improvements

Ran:
  vi $(git grep --name-only iteritems | grep -v third_party)
  vi $(git grep --name-only itervalues | grep -v third_party)
  vi $(git grep --name-only 'print ' | grep -v third_party)

and edited the files quickly with ad hoc macros. Then ran in recipes/:
  ./recipes.py test train

Only a small subset of files had been updated to use six.iteritems()
and six.itervalues(). Since the datasets gclient works with are small
(pretty much always below 200 items), it's better to switch straight to
.items() and take the temporary performance hit on Python 2 than to
have to come back later and rewrite the code again.
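
For reference, a minimal sketch (not part of this change; assumes six is
installed) of the three iteration styles and the trade-off accepted here:

  d = {'a': 1, 'b': 2}

  # Python 2 only; removed in Python 3:
  #   for k, v in d.iteritems(): ...

  # Portable via six: an iterator on both 2 and 3, but it keeps the six
  # dependency and still has to be rewritten once Python 2 goes away.
  import six
  for k, v in six.iteritems(d):
    pass

  # What this change standardizes on: .items() builds a list on Python 2
  # (cheap for gclient's <200-item dicts) and returns a view on Python 3.
  for k, v in d.items():
    pass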

Recipe-Nontrivial-Roll: build
Bug: 984182
Change-Id: I5faf11486b66b0d73c9098ab0f2ce1b15a45c53e
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/1854900
Commit-Queue: Edward Lesmes <ehmaldonado@chromium.org>
Reviewed-by: Edward Lesmes <ehmaldonado@chromium.org>
Auto-Submit: Marc-Antoine Ruel <maruel@chromium.org>
parent e1410883
@@ -41,7 +41,7 @@ def parse_got_revision(filename, revision_mapping):
with open(filename) as f:
data = json.load(f)
-for path, info in data['solutions'].iteritems():
+for path, info in data['solutions'].items():
# gclient json paths always end with a slash
path = path.rstrip('/')
if path in revision_mapping:
@@ -52,7 +52,7 @@ def parse_got_revision(filename, revision_mapping):
def emit_buildprops(got_revisions):
-for prop, revision in got_revisions.iteritems():
+for prop, revision in got_revisions.items():
print('@@@SET_BUILD_PROPERTY@%s@%s@@@' % (prop, json.dumps(revision)))
......
@@ -5,7 +5,7 @@
# found in the LICENSE file.
# Set unique build ID.
-AUTONINJA_BUILD_ID="$(python -c "import uuid; print uuid.uuid4()")"
+AUTONINJA_BUILD_ID="$(python -c "import uuid; print(uuid.uuid4())")"
export AUTONINJA_BUILD_ID
if [ "$NINJA_SUMMARIZE_BUILD" == "1" ]; then
......
@@ -908,7 +908,7 @@ class _CppLintState(object):
def PrintErrorCounts(self):
"""Print a summary of errors by category, and the total."""
-for category, count in self.errors_by_category.iteritems():
+for category, count in self.errors_by_category.items():
sys.stderr.write('Category \'%s\' errors found: %d\n' %
(category, count))
sys.stderr.write('Total errors found: %d\n' % self.error_count)
@@ -4481,7 +4481,7 @@ def _GetTextInside(text, start_pattern):
# Give opening punctuations to get the matching close-punctuations.
matching_punctuation = {'(': ')', '{': '}', '[': ']'}
-closing_punctuation = set(matching_punctuation.itervalues())
+closing_punctuation = set(matching_punctuation.values())
# Find the position to start extracting text.
match = re.search(start_pattern, text, re.M)
......
@@ -590,7 +590,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
# If a line is in custom_deps, but not in the solution, we want to append
# this line to the solution.
-for dep_name, dep_info in six.iteritems(self.custom_deps):
+for dep_name, dep_info in self.custom_deps.items():
if dep_name not in deps:
deps[dep_name] = {'url': dep_info, 'dep_type': 'git'}
@@ -601,13 +601,13 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
# recursively included by "src/ios_foo/DEPS" should also require
# "checkout_ios=True".
if self.condition:
-for value in six.itervalues(deps):
+for value in deps.values():
gclient_eval.UpdateCondition(value, 'and', self.condition)
if rel_prefix:
logging.warning('use_relative_paths enabled.')
rel_deps = {}
-for d, url in six.iteritems(deps):
+for d, url in deps.items():
# normpath is required to allow DEPS to use .. in their
# dependency local path.
rel_deps[os.path.normpath(os.path.join(rel_prefix, d))] = url
@@ -619,7 +619,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
def _deps_to_objects(self, deps, use_relative_paths):
"""Convert a deps dict to a dict of Dependency objects."""
deps_to_add = []
-for name, dep_value in six.iteritems(deps):
+for name, dep_value in deps.items():
should_process = self.should_process
if dep_value is None:
continue
@@ -727,7 +727,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
self._vars = local_scope.get('vars', {})
if self.parent:
-for key, value in six.iteritems(self.parent.get_vars()):
+for key, value in self.parent.get_vars().items():
if key in self._vars:
self._vars[key] = value
# Since we heavily post-process things, freeze ones which should
@@ -764,7 +764,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
if rel_prefix:
logging.warning('Updating recursedeps by prepending %s.', rel_prefix)
rel_deps = {}
-for depname, options in six.iteritems(self.recursedeps):
+for depname, options in self.recursedeps.items():
rel_deps[
os.path.normpath(os.path.join(rel_prefix, depname))] = options
self.recursedeps = rel_deps
@@ -1600,7 +1600,7 @@ it or fix the checkout.
full_entries = [os.path.join(self.root_dir, e.replace('/', os.path.sep))
for e in entries]
-for entry, prev_url in six.iteritems(self._ReadEntries()):
+for entry, prev_url in self._ReadEntries().items():
if not prev_url:
# entry must have been overridden via .gclient custom_deps
continue
@@ -1747,7 +1747,7 @@ it or fix the checkout.
'The following --patch-ref flags were not used. Please fix it:\n%s' %
('\n'.join(
patch_repo + '@' + patch_ref
-for patch_repo, patch_ref in six.iteritems(patch_refs))))
+for patch_repo, patch_ref in patch_refs.items())))
# Once all the dependencies have been processed, it's now safe to write
# out the gn_args_file and run the hooks.
@@ -1834,7 +1834,7 @@ it or fix the checkout.
'url': rev.split('@')[0] if rev else None,
'rev': rev.split('@')[1] if rev and '@' in rev else None,
}
-for name, rev in six.iteritems(entries)
+for name, rev in entries.items()
}
if self._options.output_json == '-':
print(json.dumps(json_output, indent=2, separators=(',', ': ')))
@@ -2122,7 +2122,7 @@ class Flattener(object):
self._flatten_dep(solution)
if pin_all_deps:
-for dep in six.itervalues(self._deps):
+for dep in self._deps.values():
self._pin_dep(dep)
def add_deps_file(dep):
@@ -2140,7 +2140,7 @@ class Flattener(object):
return
assert dep.url
self._deps_files.add((dep.url, deps_file, dep.hierarchy_data()))
-for dep in six.itervalues(self._deps):
+for dep in self._deps.values():
add_deps_file(dep)
gn_args_dep = self._deps.get(self._client.dependencies[0]._gn_args_from,
@@ -2183,7 +2183,7 @@ class Flattener(object):
# Only include vars explicitly listed in the DEPS files or gclient solution,
# not automatic, local overrides (i.e. not all of dep.get_vars()).
hierarchy = dep.hierarchy(include_url=False)
-for key, value in six.iteritems(dep._vars):
+for key, value in dep._vars.items():
# Make sure there are no conflicting variables. It is fine however
# to use same variable name, as long as the value is consistent.
assert key not in self._vars or self._vars[key][1] == value, (
@@ -2191,7 +2191,7 @@ class Flattener(object):
dep.name, key, value, self._vars[key][1]))
self._vars[key] = (hierarchy, value)
# Override explicit custom variables.
-for key, value in six.iteritems(dep.custom_vars):
+for key, value in dep.custom_vars.items():
# Do custom_vars that don't correspond to DEPS vars ever make sense? DEPS
# conditionals shouldn't be using vars that aren't also defined in the
# DEPS (presubmit actually disallows this), so any new custom_var must be
@@ -2344,7 +2344,7 @@ def _HooksOsToLines(hooks_os):
if not hooks_os:
return []
s = ['hooks_os = {']
-for hook_os, os_hooks in six.iteritems(hooks_os):
+for hook_os, os_hooks in hooks_os.items():
s.append(' "%s": [' % hook_os)
for dep, hook in os_hooks:
s.extend([
......
@@ -52,7 +52,7 @@ class _NodeDict(collections.MutableMapping):
def MoveTokens(self, origin, delta):
if self.tokens:
new_tokens = {}
-for pos, token in six.iteritems(self.tokens):
+for pos, token in self.tokens.items():
if pos[0] >= origin:
pos = (pos[0] + delta, pos[1])
token = token[:2] + (pos,) + token[3:]
@@ -509,14 +509,14 @@ def Parse(content, validate_syntax, filename, vars_override=None,
if 'deps_os' in result:
deps = result.setdefault('deps', {})
-for os_name, os_deps in six.iteritems(result['deps_os']):
+for os_name, os_deps in result['deps_os'].items():
os_deps = _StandardizeDeps(os_deps, vars_dict)
_MergeDepsOs(deps, os_deps, os_name)
del result['deps_os']
if 'hooks_os' in result:
hooks = result.setdefault('hooks', [])
-for os_name, os_hooks in six.iteritems(result['hooks_os']):
+for os_name, os_hooks in result['hooks_os'].items():
for hook in os_hooks:
UpdateCondition(hook, 'and', 'checkout_' + os_name)
hooks.extend(os_hooks)
......
@@ -1229,7 +1229,7 @@ class FrozenDict(collections.Mapping):
return True
if len(self) != len(other):
return False
-for k, v in self.iteritems():
+for k, v in self.items():
if k not in other or other[k] != v:
return False
return True
......
@@ -238,7 +238,7 @@ class CookiesAuthenticator(Authenticator):
return gitcookies
def _get_auth_for_host(self, host):
-for domain, creds in self.gitcookies.iteritems():
+for domain, creds in self.gitcookies.items():
if cookielib.domain_match(host, domain):
return (creds[0], None, creds[1])
return self.netrc.authenticators(host)
@@ -386,7 +386,7 @@ def CreateHttpConn(host, path, reqtype='GET', headers=None, body=None):
headers.setdefault('Content-Type', 'application/json')
if LOGGER.isEnabledFor(logging.DEBUG):
LOGGER.debug('%s %s://%s%s' % (reqtype, GERRIT_PROTOCOL, host, url))
-for key, val in headers.iteritems():
+for key, val in headers.items():
if key == 'Authorization':
val = 'HIDDEN'
LOGGER.debug('%s: %s' % (key, val))
@@ -798,7 +798,7 @@ def AddReviewers(host, change, reviewers=None, ccs=None, notify=True,
resp = ReadHttpJsonResponse(conn, accept_statuses=accept_statuses)
errored = set()
-for result in resp.get('reviewers', {}).itervalues():
+for result in resp.get('reviewers', {}).values():
r = result.get('input')
state = 'REVIEWER' if r in reviewers else 'CC'
if result.get('error'):
@@ -845,7 +845,7 @@ def SetReview(host, change, msg=None, labels=None, notify=None, ready=None):
conn = CreateHttpConn(host, path, reqtype='POST', body=body)
response = ReadHttpJsonResponse(conn)
if labels:
-for key, val in labels.iteritems():
+for key, val in labels.items():
if ('labels' not in response or key not in response['labels'] or
int(response['labels'][key] != int(val))):
raise GerritError(200, 'Unable to set "%s" label on change %s.' % (
......
@@ -315,7 +315,7 @@ def _git_set_branch_config_value(key, value, branch=None, **kwargs):
def _get_properties_from_options(options):
prop_list = getattr(options, 'properties', [])
properties = dict(x.split('=', 1) for x in prop_list)
-for key, val in properties.iteritems():
+for key, val in properties.items():
try:
properties[key] = json.loads(val)
except ValueError:
@@ -415,7 +415,7 @@ def _get_bucket_map(changelist, options, option_parser):
output_stream=sys.stdout)
if masters is None:
return None
-return {m: b for m, b in masters.iteritems()}
+return {m: b for m, b in masters.items()}
if options.bucket:
return {options.bucket: {b: [] for b in options.bot}}
@@ -452,11 +452,11 @@ def _trigger_try_jobs(auth_config, changelist, buckets, options, patchset):
options: Command-line options.
"""
print('Scheduling jobs on:')
-for bucket, builders_and_tests in sorted(buckets.iteritems()):
+for bucket, builders_and_tests in sorted(buckets.items()):
print('Bucket:', bucket)
print('\n'.join(
' %s: %s' % (builder, tests)
-for builder, tests in sorted(builders_and_tests.iteritems())))
+for builder, tests in sorted(builders_and_tests.items())))
print('To see results here, run: git cl try-results')
print('To see results in browser, run: git cl web')
@@ -495,13 +495,13 @@ def _make_try_job_schedule_requests(changelist, buckets, options, patchset):
'value': '1'})
requests = []
-for raw_bucket, builders_and_tests in sorted(buckets.iteritems()):
+for raw_bucket, builders_and_tests in sorted(buckets.items()):
project, bucket = _parse_bucket(raw_bucket)
if not project or not bucket:
print('WARNING Could not parse bucket "%s". Skipping.' % raw_bucket)
continue
-for builder, tests in sorted(builders_and_tests.iteritems()):
+for builder, tests in sorted(builders_and_tests.items()):
properties = shared_properties.copy()
if 'presubmit' in builder.lower():
properties['dry_run'] = 'true'
@@ -1874,7 +1874,7 @@ class Changelist(object):
# Add the robot comments onto the list of comments, but only
# keep those that are from the latest patchset.
latest_patch_set = self.GetMostRecentPatchset()
-for path, robot_comments in robot_file_comments.iteritems():
+for path, robot_comments in robot_file_comments.items():
line_comments = file_comments.setdefault(path, [])
line_comments.extend(
[c for c in robot_comments if c['patch_set'] == latest_patch_set])
@@ -1883,7 +1883,7 @@ class Changelist(object):
# {author+date: {path: {patchset: {line: url+message}}}}
comments = collections.defaultdict(
lambda: collections.defaultdict(lambda: collections.defaultdict(dict)))
-for path, line_comments in file_comments.iteritems():
+for path, line_comments in file_comments.items():
for comment in line_comments:
tag = comment.get('tag', '')
if tag.startswith('autogenerated') and 'robot_id' not in comment:
@@ -2095,7 +2095,7 @@ class Changelist(object):
patchset = int(revision_info['_number'])
else:
patchset = parsed_issue_arg.patchset
-for revision_info in detail['revisions'].itervalues():
+for revision_info in detail['revisions'].values():
if int(revision_info['_number']) == parsed_issue_arg.patchset:
break
else:
@@ -2672,7 +2672,7 @@ class Changelist(object):
has_patchset = any(
int(revision_data['_number']) == patchset
-for revision_data in data['revisions'].itervalues())
+for revision_data in data['revisions'].values())
if not has_patchset:
raise Exception('Patchset %d is not known in Gerrit change %d' %
(patchset, self.GetIssue()))
@@ -3246,8 +3246,8 @@ class _GitCookiesChecker(object):
self._all_hosts = [
(h, u, s)
for h, u, s in itertools.chain(
-((h, u, '.netrc') for h, (u, _, _) in a.netrc.hosts.iteritems()),
-((h, u, '.gitcookies') for h, (u, _) in a.gitcookies.iteritems())
+((h, u, '.netrc') for h, (u, _, _) in a.netrc.hosts.items()),
+((h, u, '.gitcookies') for h, (u, _) in a.gitcookies.items())
)
if h.endswith(self._GOOGLESOURCE)
]
@@ -3328,18 +3328,18 @@ class _GitCookiesChecker(object):
def get_partially_configured_hosts(self):
return set(
(host if i1 else self._canonical_gerrit_googlesource_host(host))
-for host, (i1, i2) in self._get_git_gerrit_identity_pairs().iteritems()
+for host, (i1, i2) in self._get_git_gerrit_identity_pairs().items()
if None in (i1, i2) and host != '.' + self._GOOGLESOURCE)
def get_conflicting_hosts(self):
return set(
host
-for host, (i1, i2) in self._get_git_gerrit_identity_pairs().iteritems()
+for host, (i1, i2) in self._get_git_gerrit_identity_pairs().items()
if None not in (i1, i2) and i1 != i2)
def get_duplicated_hosts(self):
counters = collections.Counter(h for h, _, _ in self.get_hosts_with_creds())
-return set(host for host, count in counters.iteritems() if count > 1)
+return set(host for host, count in counters.items() if count > 1)
_EXPECTED_HOST_IDENTITY_DOMAINS = {
'chromium.googlesource.com': 'chromium.org',
@@ -3352,7 +3352,7 @@ class _GitCookiesChecker(object):
Note: skips hosts which have conflicting identities for Git and Gerrit.
"""
hosts = set()
-for host, expected in self._EXPECTED_HOST_IDENTITY_DOMAINS.iteritems():
+for host, expected in self._EXPECTED_HOST_IDENTITY_DOMAINS.items():
pair = self._get_git_gerrit_identity_pairs().get(host)
if pair and pair[0] == pair[1]:
_, domain = self._parse_identity(pair[0])
@@ -4722,7 +4722,7 @@ def CMDtry(parser, args):
print('There are no failed jobs in the latest set of jobs '
'(patchset #%d), doing nothing.' % patchset)
return 0
-num_builders = sum(map(len, buckets.itervalues()))
+num_builders = sum(map(len, buckets.values()))
if num_builders > 10:
confirm_or_exit('There are %d builders with failed builds.'
% num_builders, action='continue')
@@ -4740,7 +4740,7 @@ def CMDtry(parser, args):
print('Scheduling CQ dry run on: %s' % cl.GetIssueURL())
return cl.SetCQState(_CQState.DRY_RUN)
-for builders in buckets.itervalues():
+for builders in buckets.values():
if any('triggered' in b for b in builders):
print('ERROR You are trying to send a job to a triggered bot. This type '
'of bot requires an initial job from a parent (usually a builder). '
......
@@ -77,7 +77,7 @@ def main(argv):
current = current_branch()
all_branches = set(branches())
merge_base_map = {b: get_or_create_merge_base(b) for b in all_branches}
-merge_base_map = {b: v for b, v in merge_base_map.iteritems() if v}
+merge_base_map = {b: v for b, v in merge_base_map.items() if v}
if current in all_branches:
all_branches.remove(current)
all_tags = set(tags())
@@ -86,7 +86,7 @@ def main(argv):
if merge_base_map:
commit = line[line.find(BRIGHT_RED)+len(BRIGHT_RED):line.find('\t')]
base_for_branches = set()
-for branch, sha in merge_base_map.iteritems():
+for branch, sha in merge_base_map.items():
if sha.startswith(commit):
base_for_branches.add(branch)
if base_for_branches:
......
@@ -147,7 +147,7 @@ class BranchMapper(object):
roots = set()
# A map of parents to a list of their children.
-for branch, branch_info in self.__branches_info.iteritems():
+for branch, branch_info in self.__branches_info.items():
if not branch_info:
continue
......
@@ -57,12 +57,12 @@ def fetch_remotes(branch_tree):
dest_spec = fetchspec.partition(':')[2]
remote_name = key.split('.')[1]
fetchspec_map[dest_spec] = remote_name
-for parent in branch_tree.itervalues():
+for parent in branch_tree.values():
if parent in tag_set:
fetch_tags = True
else:
full_ref = git.run('rev-parse', '--symbolic-full-name', parent)
-for dest_spec, remote_name in fetchspec_map.iteritems():
+for dest_spec, remote_name in fetchspec_map.items():
if fnmatch(full_ref, dest_spec):
remotes.add(remote_name)
break
@@ -121,7 +121,7 @@ def remove_empty_branches(branch_tree):
reparents[down] = (order, parent, old_parent)
# Apply all reparenting recorded, in order.
-for branch, value in sorted(reparents.iteritems(), key=lambda x:x[1][0]):
+for branch, value in sorted(reparents.items(), key=lambda x:x[1][0]):
_, parent, old_parent = value
if parent in tag_set:
git.set_branch_config(branch, 'remote', '.')
@@ -134,7 +134,7 @@ def remove_empty_branches(branch_tree):
old_parent))
# Apply all deletions recorded, in order.
-for branch, _ in sorted(deletions.iteritems(), key=lambda x: x[1]):
+for branch, _ in sorted(deletions.items(), key=lambda x: x[1]):
print(git.run('branch', '-d', branch))
@@ -272,7 +272,7 @@ def main(args=None):
fetch_remotes(branch_tree)
merge_base = {}
-for branch, parent in branch_tree.iteritems():
+for branch, parent in branch_tree.items():
merge_base[branch] = git.get_or_create_merge_base(branch, parent)
logging.debug('branch_tree: %s' % pformat(branch_tree))
......
@@ -36,7 +36,7 @@ def main(args):
run('branch', '-m', opts.old_name, opts.new_name)
# update the downstreams
-for branch, merge in branch_config_map('merge').iteritems():
+for branch, merge in branch_config_map('merge').items():
if merge == 'refs/heads/' + opts.old_name:
# Only care about local branches
if branch_config(branch, 'remote') == '.':
......
@@ -37,7 +37,7 @@ praw() {
pcommand() {
praw "$(python -c '\
import sys, pipes; \
-print " ".join(map(pipes.quote, sys.argv[1:]))' "$@")"
+print(" ".join(map(pipes.quote, sys.argv[1:])))' "$@")"
}
# run a visible command
@@ -71,7 +71,7 @@ add() {
if [[ ! $CONTENT ]]
then
CONTENT=$(python -c 'import random, string; \
-print "".join(random.sample(string.lowercase, 16))')
+print("".join(random.sample(string.lowercase, 16)))')
fi
echo "$CONTENT" > $1
silent git add $1
......
@@ -603,7 +603,7 @@ class MyActivity(object):
project, issue_id = issue_uid.split(':')
missing_issues_by_project[project].append(issue_id)
-for project, issue_ids in missing_issues_by_project.iteritems():
+for project, issue_ids in missing_issues_by_project.items():
self.referenced_issues += self.monorail_get_issues(project, issue_ids)
def print_issues(self):
@@ -676,7 +676,7 @@ class MyActivity(object):
if not url:
raise Exception('Dumped item %s does not specify url' % item)
output[url] = dict(
-(k, v) for k,v in item.iteritems() if k not in ignore_keys)
+(k, v) for k,v in item.items() if k not in ignore_keys)
return output
class PythonObjectEncoder(json.JSONEncoder):
......
@@ -271,7 +271,7 @@ class Database(object):
while True:
dir_owner_rules = self._paths_to_owners.get(dirname)
if dir_owner_rules:
-for owned_path, path_owners in dir_owner_rules.iteritems():
+for owned_path, path_owners in dir_owner_rules.items():
if self._fnmatch(objname, owned_path):
obj_owners |= path_owners
up_dirname = self.os_path.dirname(dirname)
@@ -539,7 +539,7 @@ class Database(object):
# Merge the parent information with our information, adjusting
# distances as necessary, and replacing the parent directory
# names with our names.
-for owner, par_dir_and_distances in parent_res.iteritems():
+for owner, par_dir_and_distances in parent_res.items():
if owner in res:
# If the same person is in multiple OWNERS files above a given
# directory, only count the closest one.
@@ -564,7 +564,7 @@ class Database(object):
dir_owners = self._all_possible_owners_for_dir_or_file(
current_dir, author,
all_possible_owners_for_dir_or_file_cache)
-for owner, dir_and_distance in dir_owners.iteritems():
+for owner, dir_and_distance in dir_owners.items():
if owner in all_possible_owners:
all_possible_owners[owner].append(dir_and_distance)
else:
@@ -605,7 +605,7 @@ class Database(object):
total_costs_by_owner = Database.total_costs_by_owner(all_possible_owners,
dirs)
# Return the lowest cost owner. In the case of a tie, pick one randomly.
-lowest_cost = min(total_costs_by_owner.itervalues())
+lowest_cost = min(total_costs_by_owner.values())
lowest_cost_owners = filter(
lambda owner: total_costs_by_owner[owner] == lowest_cost,
total_costs_by_owner)
......
@@ -949,7 +949,7 @@ def CheckBuildbotPendingBuilds(input_api, output_api, url, max_pendings,
'looking up buildbot status')]
out = []
-for (builder_name, builder) in data.iteritems():
+for (builder_name, builder) in data.items():
if builder_name in ignored:
continue
if builder.get('state', '') == 'offline':
@@ -1326,7 +1326,7 @@ def CheckCIPDPackages(input_api, output_api, platforms, packages):
manifest = []
for p in platforms:
manifest.append('$VerifiedPlatform %s' % (p,))
-for k, v in packages.iteritems():
+for k, v in packages.items():
manifest.append('%s %s' % (k, v))
return CheckCIPDManifest(input_api, output_api, content='\n'.join(manifest))
@@ -1468,7 +1468,7 @@ def CheckChangedLUCIConfigs(input_api, output_api):
# windows
file_path = f.LocalPath().replace(_os.sep, '/')
logging.debug('Affected file path: %s', file_path)
-for dr, cs in dir_to_config_set.iteritems():
+for dr, cs in dir_to_config_set.items():
if dr == '/' or file_path.startswith(dr):
cs_to_files[cs].append({
'path': file_path[len(dr):] if dr != '/' else file_path,
@@ -1476,7 +1476,7 @@ def CheckChangedLUCIConfigs(input_api, output_api):
'\n'.join(f.NewContents()).encode('utf-8'))
})
outputs = []
-for cs, f in cs_to_files.iteritems():
+for cs, f in cs_to_files.items():
try:
# TODO(myjang): parallelize
res = request(
......
@@ -386,7 +386,7 @@ class GerritAccessor(object):
# Find revision info for the patchset we want.
if patchset is not None:
-for rev, rev_info in info['revisions'].iteritems():
+for rev, rev_info in info['revisions'].items():
if str(rev_info['_number']) == str(patchset):
break
else:
@@ -1279,10 +1279,10 @@ class GetPostUploadExecuter(object):
def _MergeMasters(masters1, masters2):
"""Merges two master maps. Merges also the tests of each builder."""
result = {}
-for (master, builders) in itertools.chain(masters1.iteritems(),
-masters2.iteritems()):
+for (master, builders) in itertools.chain(masters1.items(),
+masters2.items()):
new_builders = result.setdefault(master, {})
-for (builder, tests) in builders.iteritems():
+for (builder, tests) in builders.items():
new_builders.setdefault(builder, set([])).update(tests)
return result
@@ -1329,7 +1329,7 @@ def DoGetTryMasters(change,
presubmit_script, filename, project, change))
# Make sets to lists again for later JSON serialization.
-for builders in results.itervalues():
+for builders in results.values():
for builder in builders:
builders[builder] = list(builders[builder])
@@ -1659,7 +1659,7 @@ def canned_check_filter(method_names):
setattr(presubmit_canned_checks, method_name, lambda *_a, **_kw: [])
yield
finally:
-for name, method in filtered.iteritems():
+for name, method in filtered.items():
setattr(presubmit_canned_checks, name, method)
......
@@ -183,7 +183,7 @@ class BotUpdateApi(recipe_api.RecipeApi):
# Only update with non-empty values. Some recipe might otherwise
# overwrite the HEAD default with an empty string.
revisions.update(
-(k, v) for k, v in cfg.revisions.iteritems() if v)
+(k, v) for k, v in cfg.revisions.items() if v)
if cfg.solutions and root_solution_revision:
revisions[first_sol] = root_solution_revision
# Allow for overrides required to bisect into rolls.
@@ -275,7 +275,7 @@ class BotUpdateApi(recipe_api.RecipeApi):
if update_presentation:
# Set properties such as got_revision.
for prop_name, prop_value in (
-self.last_returned_properties.iteritems()):
+self.last_returned_properties.items()):
step_result.presentation.properties[prop_name] = prop_value
# Add helpful step description in the step UI.
@@ -471,7 +471,7 @@ class BotUpdateApi(recipe_api.RecipeApi):
rev_reverse_map = self.m.gclient.got_revision_reverse_mapping(cfg)
return sorted(
prop
-for prop, project in rev_reverse_map.iteritems()
+for prop, project in rev_reverse_map.items()
if project == project_name
)
......
@@ -20,12 +20,12 @@ class BotUpdateTestApi(recipe_test_api.RecipeTestApi):
properties = {
property_name: self.gen_revision(project_name)
-for property_name, project_name in revision_mapping.iteritems()
+for property_name, project_name in revision_mapping.items()
}
properties.update({
'%s_cp' % property_name: ('refs/heads/master@{#%s}' %
self.gen_commit_position(project_name))
-for property_name, project_name in revision_mapping.iteritems()
+for property_name, project_name in revision_mapping.items()
})
output.update({
......
@@ -36,7 +36,7 @@ def check_list_type(name, var, expect_inner):
def check_dict_type(name, var, expect_key, expect_value):
check_type(name, var, dict)
-for key, value in var.iteritems():
+for key, value in var.items():
check_type('%s: key' % name, key, expect_key)
check_type('%s[%s]' % (name, key), value, expect_value)
......
@@ -133,7 +133,7 @@ class GclientApi(recipe_api.RecipeApi):
rev_map = cfg.got_revision_mapping.as_jsonish()
reverse_rev_map = cfg.got_revision_reverse_mapping.as_jsonish()
combined_length = len(rev_map) + len(reverse_rev_map)
-reverse_rev_map.update({v: k for k, v in rev_map.iteritems()})
+reverse_rev_map.update({v: k for k, v in rev_map.items()})
# Make sure we never have duplicate values in the old map.
assert combined_length == len(reverse_rev_map)
@@ -194,7 +194,7 @@ class GclientApi(recipe_api.RecipeApi):
result = self.m.step.active_result
solutions = result.json.output['solutions']
for propname, path in sorted(
-self.got_revision_reverse_mapping(cfg).iteritems()):
+self.got_revision_reverse_mapping(cfg).items()):
# gclient json paths always end with a slash
info = solutions.get(path + '/') or solutions.get(path)
if info:
@@ -213,7 +213,7 @@ class GclientApi(recipe_api.RecipeApi):
"""
cfg = gclient_config or self.c
-for prop, custom_var in cfg.parent_got_revision_mapping.iteritems():
+for prop, custom_var in cfg.parent_got_revision_mapping.items():
val = str(self.m.properties.get(prop, ''))
# TODO(infra): Fix coverage.
if val: # pragma: no cover
@@ -282,7 +282,7 @@ class GclientApi(recipe_api.RecipeApi):
for cur_file in files:
if cur_file.endswith('index.lock'):
path_to_file = os.path.join(path, cur_file)
-print 'deleting %s' % path_to_file
+print('deleting %s' % path_to_file)
os.remove(path_to_file)
""",
args=[self.m.path['start_dir']],
@@ -336,7 +336,7 @@ class GclientApi(recipe_api.RecipeApi):
return rel_path
# repo_path_map keys may be non-canonical.
-for key, (rel_path, _) in cfg.repo_path_map.iteritems():
+for key, (rel_path, _) in cfg.repo_path_map.items():
if self._canonicalize_repo_url(key) == repo_url:
return rel_path
......
@@ -197,7 +197,7 @@
"cmd": [
"python",
"-u",
-"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n",
+"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print('deleting %s' % path_to_file)\n os.remove(path_to_file)\n",
"[START_DIR]"
],
"infra_step": true,
@@ -212,7 +212,7 @@
"@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@",
"@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@",
"@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@",
-"@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@",
+"@@@STEP_LOG_LINE@python.inline@ print('deleting %s' % path_to_file)@@@",
"@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@",
"@@@STEP_LOG_END@python.inline@@@"
]
......
@@ -197,7 +197,7 @@
"cmd": [
"python",
"-u",
-"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n",
+"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print('deleting %s' % path_to_file)\n os.remove(path_to_file)\n",
"[START_DIR]"
],
"infra_step": true,
@@ -212,7 +212,7 @@
"@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@",
"@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@",
"@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@",
-"@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@",
+"@@@STEP_LOG_LINE@python.inline@ print('deleting %s' % path_to_file)@@@",
"@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@",
"@@@STEP_LOG_END@python.inline@@@"
]
......
@@ -199,7 +199,7 @@
"cmd": [
"python",
"-u",
-"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n",
+"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print('deleting %s' % path_to_file)\n os.remove(path_to_file)\n",
"[START_DIR]"
],
"infra_step": true,
@@ -214,7 +214,7 @@
"@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@",
"@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@",
"@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@",
-"@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@",
+"@@@STEP_LOG_LINE@python.inline@ print('deleting %s' % path_to_file)@@@",
"@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@",
"@@@STEP_LOG_END@python.inline@@@"
]
......
@@ -197,7 +197,7 @@
"cmd": [
"python",
"-u",
-"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n",
+"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print('deleting %s' % path_to_file)\n os.remove(path_to_file)\n",
"[START_DIR]"
],
"infra_step": true,
@@ -212,7 +212,7 @@
"@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@",
"@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@",
"@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@",
-"@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@",
+"@@@STEP_LOG_LINE@python.inline@ print('deleting %s' % path_to_file)@@@",
"@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@",
"@@@STEP_LOG_END@python.inline@@@"
]
......
@@ -105,7 +105,7 @@ class GerritApi(recipe_api.RecipeApi):
o_params=['ALL_REVISIONS', 'ALL_COMMITS'],
limit=1)
cl = cls[0] if len(cls) == 1 else {'revisions': {}}
-for ri in cl['revisions'].itervalues():
+for ri in cl['revisions'].values():
# TODO(tandrii): add support for patchset=='current'.
if str(ri['_number']) == str(patchset):
return ri
......
@@ -18,7 +18,7 @@ class GitApi(recipe_api.RecipeApi):
git_cmd = ['git']
options = kwargs.pop('git_config_options', {})
-for k, v in sorted(options.iteritems()):
+for k, v in sorted(options.items()):
git_cmd.extend(['-c', '%s=%s' % (k, v)])
can_fail_build = kwargs.pop('can_fail_build', True)
try:
@@ -59,7 +59,7 @@ class GitApi(recipe_api.RecipeApi):
"""
if previous_result:
assert isinstance(previous_result, dict)
-assert all(isinstance(v, long) for v in previous_result.itervalues())
+assert all(isinstance(v, long) for v in previous_result.values())
assert 'size' in previous_result
assert 'size-pack' in previous_result
@@ -78,14 +78,14 @@ class GitApi(recipe_api.RecipeApi):
result[name] = long(value.strip())
def results_to_text(results):
-return [' %s: %s' % (k, v) for k, v in results.iteritems()]
+return [' %s: %s' % (k, v) for k, v in results.items()]
step_result.presentation.logs['result'] = results_to_text(result)
if previous_result:
delta = {
key: value - previous_result[key]
-for key, value in result.iteritems()
+for key, value in result.items()
if key in previous_result}
step_result.presentation.logs['delta'] = (
['before:'] + results_to_text(previous_result) +
......
@@ -167,12 +167,12 @@ def main(arguments):
em = tf._extract_member
def _extract_member(tarinfo, targetpath):
if not os.path.abspath(targetpath).startswith(args.extract_to):
-print 'Skipping %s' % (tarinfo.name,)
+print('Skipping %s' % (tarinfo.name,))
ret['skipped']['filecount'] += 1
ret['skipped']['bytes'] += tarinfo.size
ret['skipped']['names'].append(tarinfo.name)
return
-print 'Extracting %s' % (tarinfo.name,)
+print('Extracting %s' % (tarinfo.name,))
ret['extracted']['filecount'] += 1
ret['extracted']['bytes'] += tarinfo.size
return em(tarinfo, targetpath)
......
@@ -160,7 +160,7 @@ class GSUtilApi(recipe_api.RecipeApi):
def _generate_metadata_args(self, metadata):
result = []
if metadata:
-for k, v in sorted(metadata.iteritems(), key=lambda (k, _): k):
+for k, v in sorted(metadata.items(), key=lambda (k, _): k):
field = self._get_metadata_field(k)
param = (field) if v is None else ('%s:%s' % (field, v))
result += ['-h', param]
......
@@ -89,7 +89,7 @@ class TryserverApi(recipe_api.RecipeApi):
self._gerrit_change_target_ref = (
'refs/heads/' + self._gerrit_change_target_ref)
-for rev in res['revisions'].itervalues():
+for rev in res['revisions'].values():
if int(rev['_number']) == self.gerrit_change.patchset:
self._gerrit_change_fetch_ref = rev['ref']
break
......
@@ -171,7 +171,7 @@ def finalize(commit_msg, current_dir, rolls):
# Pull the dependency to the right revision. This is surprising to users
# otherwise.
-for _head, roll_to, full_dir in sorted(rolls.itervalues()):
+for _head, roll_to, full_dir in sorted(rolls.values()):
check_call(['git', 'checkout', '--quiet', roll_to], cwd=full_dir)
@@ -249,7 +249,7 @@ def main():
logs = []
setdep_args = []
-for dependency, (head, roll_to, full_dir) in sorted(rolls.iteritems()):
+for dependency, (head, roll_to, full_dir) in sorted(rolls.items()):
log = generate_commit_message(
full_dir, dependency, head, roll_to, args.no_log, args.log_limit)
logs.append(log)
......
@@ -229,7 +229,7 @@ def SplitCl(description_file, comment_file, changelist, cmd_upload, dry_run,
return 0
for cl_index, (directory, files) in \
-enumerate(files_split_by_owners.iteritems(), 1):
+enumerate(files_split_by_owners.items(), 1):
# Use '/' as a path separator in the branch name and the CL description
# and comment.
directory = directory.replace(os.path.sep, '/')
......
@@ -26,8 +26,8 @@ class AutoStubMixIn(object):
def tearDown(self):
"""Restore all the mocked members."""
if self._saved:
-for obj, items in self._saved.iteritems():
-for member, previous_value in items.iteritems():
+for obj, items in self._saved.items():
+for member, previous_value in items.items():
setattr(obj, member, previous_value)
@@ -57,7 +57,7 @@ class SimpleMock(object):
"""Registers the name of the caller function."""
caller_name = kwargs.pop('caller_name', None) or inspect.stack()[1][3]
str_args = ', '.join(repr(arg) for arg in args)
-str_kwargs = ', '.join('%s=%r' % (k, v) for k, v in kwargs.iteritems())
+str_kwargs = ', '.join('%s=%r' % (k, v) for k, v in kwargs.items())
self.calls.append('%s(%s)' % (
caller_name, ', '.join(filter(None, [str_args, str_kwargs]))))
......
@@ -467,7 +467,7 @@ deps = {
pre_deps_hooks = [
{
'action': ['python', '-c',
-'print "pre-deps hook"; open(\\'src/git_pre_deps_hooked\\', \\'w\\').write(\\'git_pre_deps_hooked\\')'],
+'print("pre-deps hook"); open(\\'src/git_pre_deps_hooked\\', \\'w\\').write(\\'git_pre_deps_hooked\\')'],
}
]
""" % {
@@ -489,7 +489,7 @@ deps = {
pre_deps_hooks = [
{
'action': ['python', '-c',
-'print "pre-deps hook"; open(\\'src/git_pre_deps_hooked\\', \\'w\\').write(\\'git_pre_deps_hooked\\')'],
+'print("pre-deps hook"); open(\\'src/git_pre_deps_hooked\\', \\'w\\').write(\\'git_pre_deps_hooked\\')'],
},
{
'action': ['python', '-c', 'import sys; sys.exit(1)'],
......
@@ -1922,7 +1922,7 @@ the current line as well!
"#!/bin/python\n"
"# Copyright (c) 2037 Nobody.\n"
"# All Rights Reserved.\n"
-"print 'foo'\n"
+"print('foo')\n"
)
license_text = (
r".*? Copyright \(c\) 2037 Nobody." "\n"
@@ -1935,7 +1935,7 @@ the current line as well!
"#!/bin/python\n"
"# Copyright (c) 2037 Nobody.\n"
"# All Rights Reserved.\n"
-"print 'foo'\n"
+"print('foo')\n"
)
license_text = (
r".*? Copyright \(c\) 0007 Nobody." "\n"
@@ -1949,7 +1949,7 @@ the current line as well!
"#!/bin/python\n"
"# Copyright (c) 2037 Nobody.\n"
"# All Rights Reserved.\n"
-"print 'foo'\n"
+"print('foo')\n"
)
license_text = (
r".*? Copyright \(c\) 0007 Nobody." "\n"
......
@@ -94,7 +94,7 @@ class Watchlists(object):
# Compile the regular expressions ahead of time to avoid creating them
# on-the-fly multiple times per file.
self._path_regexps = {}
-for name, rule in defns.iteritems():
+for name, rule in defns.items():
filepath = rule.get('filepath')
if not filepath:
continue
@@ -117,7 +117,7 @@ class Watchlists(object):
watchers = set()  # A set, to avoid duplicates
for path in paths:
path = path.replace(os.sep, '/')
-for name, rule in self._path_regexps.iteritems():
+for name, rule in self._path_regexps.items():
if name not in self._watchlists:
continue
if rule.search(path):
......
@@ -343,24 +343,24 @@ def GenerateSetEnvCmd(target_dir):
with open(set_env_prefix + '.cmd', 'w') as f:
f.write('@echo off\n'
':: Generated by win_toolchain\\package_from_installed.py.\n')
-for var, dirs in env.iteritems():
+for var, dirs in env.items():
f.write('set %s=%s\n' % (var, BatDirs(dirs)))
f.write('if "%1"=="/x64" goto x64\n')
f.write('if "%1"=="/arm64" goto arm64\n')
-for var, dirs in env_x86.iteritems():
+for var, dirs in env_x86.items():
f.write('set %s=%s%s\n' % (
var, BatDirs(dirs), ';%PATH%' if var == 'PATH' else ''))
f.write('goto :EOF\n')
f.write(':x64\n')
-for var, dirs in env_x64.iteritems():
+for var, dirs in env_x64.items():
f.write('set %s=%s%s\n' % (
var, BatDirs(dirs), ';%PATH%' if var == 'PATH' else ''))
f.write('goto :EOF\n')
f.write(':arm64\n')
-for var, dirs in env_arm64.iteritems():
+for var, dirs in env_arm64.items():
f.write('set %s=%s%s\n' % (
var, BatDirs(dirs), ';%PATH%' if var == 'PATH' else ''))
f.write('goto :EOF\n')
......