Commit 925cedba authored by Quinten Yearsley, committed by LUCI CQ

Run spellchecker on depot_tools

This was made by running `codespell` and `scspell`
and then checking the results.

Change-Id: I169fd5b40294f83015075b4a899fbca263821f25
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/2144602
Commit-Queue: Quinten Yearsley <qyearsley@chromium.org>
Auto-Submit: Quinten Yearsley <qyearsley@chromium.org>
Reviewed-by: Edward Lesmes <ehmaldonado@chromium.org>
parent 4f3aff5f
......@@ -136,7 +136,7 @@ if not j_specified and not t_specified:
# the whole output is the command.
# On Linux and Mac, if people put depot_tools in directories with ' ',
# shell would misunderstand ' ' as a path separation.
# TODO(yyanagisawa): provide proper quating for Windows.
# TODO(yyanagisawa): provide proper quoting for Windows.
# see https://cs.chromium.org/chromium/src/tools/mb/mb.py
for i in range(len(args)):
if (i == 0 and sys.platform.startswith('win')) or ' ' in args[i]:
......
......@@ -19,7 +19,7 @@
# cipd ensure-file-resolve -ensure-file cipd_manifest.txt
$ResolvedVersions cipd_manifest.versions
# Fully supported plaforms.
# Fully supported platforms.
$VerifiedPlatform linux-amd64 mac-amd64 windows-amd64 windows-386
# Platform with best-effort support: we have some binaries cross-compiled for
......
......@@ -382,7 +382,7 @@ def _data_exists(input_sha1_sum, output_filename, extract):
input_sha1_sum: Expected sha1 stored on disk.
output_filename: The file to potentially download later. Its sha1 will be
compared to input_sha1_sum.
extract: Wheather or not a downloaded file should be extracted. If the file
extract: Whether or not a downloaded file should be extracted. If the file
is not extracted, this just compares the sha1 of the file. If the file
is to be extracted, this only compares the sha1 of the target archive if
the target directory already exists. The content of the target directory
......
......@@ -250,7 +250,7 @@ class WinUnicodeOutput(WinUnicodeOutputBase):
"""Output adaptor to a file output on Windows.
If the standard FileWrite function is used, it will be encoded in the current
code page. WriteConsoleW() permits writting any character.
code page. WriteConsoleW() permits writing any character.
"""
def __init__(self, stream, fileno, encoding):
super(WinUnicodeOutput, self).__init__(
......
......@@ -78,7 +78,7 @@
#
# Specifying a target CPU
# To specify a target CPU, the variables target_cpu and target_cpu_only
# are available and are analagous to target_os and target_os_only.
# are available and are analogous to target_os and target_os_only.
from __future__ import print_function
......@@ -550,7 +550,7 @@ class Dependency(gclient_utils.WorkItem, DependencySettings):
# thus unsorted, while the .gclient format is a list thus sorted.
#
# Interestingly enough, the following condition only works in the case we
# want: self is a 2nd level node. 3nd level node wouldn't need this since
# want: self is a 2nd level node. 3rd level node wouldn't need this since
# they already have their parent as a requirement.
if self.parent and self.parent.parent and not self.parent.parent.parent:
requirements |= set(i.name for i in self.root.dependencies if i.name)
......@@ -3086,7 +3086,7 @@ class OptionParser(optparse.OptionParser):
level=levels[min(options.verbose, len(levels) - 1)],
format='%(module)s(%(lineno)d) %(funcName)s:%(message)s')
if options.config_filename and options.spec:
self.error('Cannot specifiy both --gclientfile and --spec')
self.error('Cannot specify both --gclientfile and --spec')
if (options.config_filename and
options.config_filename != os.path.basename(options.config_filename)):
self.error('--gclientfile target must be a filename, not a path')
......
......@@ -454,7 +454,7 @@ def Parse(content, filename, vars_override=None, builtin_vars=None):
"""Parses DEPS strings.
Executes the Python-like string stored in content, resulting in a Python
dictionary specifyied by the schema above. Supports syntax validation and
dictionary specified by the schema above. Supports syntax validation and
variable expansion.
Args:
......
......@@ -390,7 +390,7 @@ class GitWrapper(SCMWrapper):
# remote ref for it, since |target_rev| might point to a local ref which
# is not up to date with the corresponding remote ref.
remote_ref = ''.join(scm.GIT.RefToRemoteRef(target_rev, self.remote))
self.Print('Trying the correspondig remote ref for %r: %r\n' % (
self.Print('Trying the corresponding remote ref for %r: %r\n' % (
target_rev, remote_ref))
if scm.GIT.IsValidRevision(self.checkout_path, remote_ref):
target_rev = remote_ref
......@@ -854,7 +854,7 @@ class GitWrapper(SCMWrapper):
if not os.path.isdir(self.checkout_path):
# revert won't work if the directory doesn't exist. It needs to
# checkout instead.
self.Print('_____ %s is missing, synching instead' % self.relpath)
self.Print('_____ %s is missing, syncing instead' % self.relpath)
# Don't reuse the args.
return self.update(options, [], file_list)
......@@ -1279,7 +1279,7 @@ class GitWrapper(SCMWrapper):
Args:
options: The configured option set
ref: (str) The branch/commit to checkout
quiet: (bool/None) Whether or not the checkout shoud pass '--quiet'; if
quiet: (bool/None) Whether or not the checkout should pass '--quiet'; if
'None', the behavior is inferred from 'options.verbose'.
Returns: (str) The output of the checkout operation
"""
......
......@@ -324,7 +324,7 @@ def rmtree(path):
def safe_makedirs(tree):
"""Creates the directory in a safe manner.
Because multiple threads can create these directories concurently, trap the
Because multiple threads can create these directories concurrently, trap the
exception and pass on.
"""
count = 0
......@@ -456,7 +456,7 @@ class Annotated(Wrapper):
finally:
self.lock.release()
# Don't keep the lock while writting. Will append \n when it shouldn't.
# Don't keep the lock while writing. Will append \n when it shouldn't.
for orphan in orphans:
if orphan[1]:
self._wrapped_write(b'%d>%s\n' % (orphan[0], orphan[1]))
......@@ -1205,7 +1205,7 @@ def FindExecutable(executable):
for path_folder in path_folders:
target = os.path.join(path_folder, executable)
# Just incase we have some ~/blah paths.
# Just in case we have some ~/blah paths.
target = os.path.abspath(os.path.expanduser(target))
if os.path.isfile(target) and os.access(target, os.X_OK):
return target
......
......@@ -563,7 +563,7 @@ def GenerateAllChanges(host, params, first_param=None, limit=500,
# (say user posting comment), subsequent calls may overlap like this:
# > initial order ABCDEFGH
# query[0..3] => ABC
# > E get's updated. New order: EABCDFGH
# > E gets updated. New order: EABCDFGH
# query[3..6] => CDF # C is a dup
# query[6..9] => GH # E is missed.
page = QueryChanges(host, params, first_param, limit, o_params,
......@@ -850,7 +850,7 @@ def ResetReviewLabels(host, change, label, value='0', message=None,
'%s label set to %s programmatically.' % (label, value))
jmsg = GetReview(host, change, revision)
if not jmsg:
raise GerritError(200, 'Could not get review information for revison %s '
raise GerritError(200, 'Could not get review information for revision %s '
'of change %s' % (revision, change))
for review in jmsg.get('labels', {}).get(label, {}).get('all', []):
if str(review.get('value', value)) != value:
......
......@@ -469,7 +469,7 @@ def freeze():
.git/info/exclude
file. See `git help ignore` for the format of this file.
If this data is indended as part of your commit, you may adjust the
If this data is intended as part of your commit, you may adjust the
freeze limit by running:
git config %s <new_limit>
Where <new_limit> is an integer threshold in megabytes.""",
......
......@@ -76,7 +76,7 @@ def split_footers(message):
maybe_footer_lines = []
footer_lines.append(line)
else:
# We only want to include malformed lines if they are preceeded by
# We only want to include malformed lines if they are preceded by
# well-formed lines. So keep them in holding until we see a well-formed
# line (case above).
maybe_footer_lines.append(line)
......
......@@ -44,7 +44,7 @@ AUTHOR_NAME = 'git-number'
AUTHOR_EMAIL = 'chrome-infrastructure-team@google.com'
# Number of bytes to use for the prefix on our internal number structure.
# 0 is slow to deserialize. 2 creates way too much bookeeping overhead (would
# 0 is slow to deserialize. 2 creates way too much bookkeeping overhead (would
# need to reimplement cache data structures to be a bit more sophisticated than
# dicts. 1 seems to be just right.
PREFIX_LEN = 1
......@@ -164,7 +164,7 @@ def finalize(targets):
tree_id = git.run('write-tree', env=env)
commit_cmd = [
# Git user.name and/or user.email may not be configured, so specifying
# them explicitly. They are not used, but requried by Git.
# them explicitly. They are not used, but required by Git.
'-c', 'user.name=%s' % AUTHOR_NAME,
'-c', 'user.email=%s' % AUTHOR_EMAIL,
'commit-tree',
......@@ -217,7 +217,7 @@ def load_generation_numbers(targets):
empty = git.mktree({})
commit_hash = git.run(
# Git user.name and/or user.email may not be configured, so specifying
# them explicitly. They are not used, but requried by Git.
# them explicitly. They are not used, but required by Git.
'-c', 'user.name=%s' % AUTHOR_NAME,
'-c', 'user.email=%s' % AUTHOR_EMAIL,
'commit-tree',
......
......@@ -190,7 +190,7 @@ class MetricsCollector(object):
def _collect_metrics(self, func, command_name, *args, **kwargs):
# If we're already collecting metrics, just execute the function.
# e.g. git-cl split invokes git-cl upload several times to upload each
# splitted CL.
# split CL.
if self.collecting_metrics:
# Don't collect metrics for this function.
# e.g. Don't record the arguments git-cl split passes to git-cl upload.
......
......@@ -579,7 +579,7 @@ class Database(object):
return all_possible_owners
def _fnmatch(self, filename, pattern):
"""Same as fnmatch.fnmatch(), but interally caches the compiled regexes."""
"""Same as fnmatch.fnmatch(), but internally caches the compiled regexes."""
matcher = self._fnmatch_cache.get(pattern)
if matcher is None:
matcher = re.compile(fnmatch.translate(pattern)).match
......
......@@ -466,7 +466,7 @@ Args:
to a local path, may cause problems with scripts that do
"git fetch origin" or "git push origin".
* arbitrary refs such as refs/whatever/not-fetched-by-default-to-cache
progress (bool): wether to show progress for fetch or not
progress (bool): whether to show progress for fetch or not
tags (bool): Also fetch tags.
Returns: If the checkout was successful, this returns the commit hash of
......
......@@ -443,7 +443,7 @@ def create_manifest_old():
# TODO(hinoka): Include patch revision.
def create_manifest(gclient_output, patch_root):
"""Return the JSONPB equivilent of the source manifest proto.
"""Return the JSONPB equivalent of the source manifest proto.
The source manifest proto is defined here:
https://chromium.googlesource.com/infra/luci/recipes-py/+/master/recipe_engine/source_manifest.proto
......@@ -746,7 +746,7 @@ def _git_checkout(sln, sln_dir, revisions, refs, no_fetch_tags, git_cache_dir,
git('clean', '-dff', cwd=sln_dir)
return
except SubprocessFailed as e:
# Exited abnormally, theres probably something wrong.
# Exited abnormally, there's probably something wrong.
print('Something failed: %s.' % str(e))
if first_try:
first_try = False
......@@ -1012,7 +1012,7 @@ def parse_args():
options.revision_mapping = json.load(f)
except Exception as e:
print(
'WARNING: Caught execption while parsing revision_mapping*: %s'
'WARNING: Caught exception while parsing revision_mapping*: %s'
% (str(e),))
# Because we print CACHE_DIR out into a .gclient file, and then later run
......@@ -1164,7 +1164,7 @@ def main():
# Check if this script should activate or not.
active = True
# Print a helpful message to tell developers whats going on with this step.
# Print a helpful message to tell developers what's going on with this step.
print_debug_info()
# Parse, manipulate, and print the gclient solutions.
......
......@@ -130,7 +130,7 @@ class PackageDefinition(object):
version information about themselves. <name> could be the name of the
binary tool, like 'cipd' in the example above.
A version file may be specifed exactly once per package.
A version file may be specified exactly once per package.
Args:
ver_file_rel (str) - A path string relative to the installation root.
......
......@@ -50,7 +50,7 @@ def BaseConfig(USE_MIRROR=True, CACHE_DIR=None,
# Maps build_property -> 'solution'
got_revision_reverse_mapping = Dict(hidden=True),
# Addition revisions we want to pass in. For now theres a duplication
# Addition revisions we want to pass in. For now there's a duplication
# of code here of setting custom vars AND passing in --revision. We hope
# to remove custom vars later.
revisions = Dict(
......
......@@ -148,7 +148,7 @@ class GitApi(recipe_api.RecipeApi):
to a local path, may cause problems with scripts that do
"git fetch origin" or "git push origin".
* arbitrary refs such as refs/whatever/not-fetched-by-default-to-cache
progress (bool): wether to show progress for fetch or not
progress (bool): whether to show progress for fetch or not
tags (bool): Also fetch tags.
Returns: If the checkout was successful, this returns the commit hash of
......
......@@ -23,7 +23,7 @@ class Gitiles(recipe_api.RecipeApi):
* archive - implies the response is a compressed tarball; requires
`extract_to`.
extract_to (Path): When fmt=='archive', instructs gitiles_client to
extract the archive to this non-existant folder.
extract the archive to this non-existent folder.
log_limit: for log URLs, limit number of results. None implies 1 page,
as returned by Gitiles.
log_start: for log URLs, the start cursor for paging.
......
......@@ -75,7 +75,7 @@ class TryserverApi(recipe_api.RecipeApi):
query_params=[('change', cl.change)],
# This list must remain static/hardcoded.
# If you need extra info, either change it here (hardcoded) or
# fetch separetely.
# fetch separately.
o_params=['ALL_REVISIONS', 'DOWNLOAD_COMMANDS'],
limit=1,
name='fetch current CL info',
......
......@@ -207,7 +207,7 @@ class GIT(object):
@staticmethod
def FetchUpstreamTuple(cwd, branch=None):
"""Returns a tuple containg remote and remote ref,
"""Returns a tuple containing remote and remote ref,
e.g. 'origin', 'refs/heads/master'
"""
try:
......
......@@ -66,7 +66,7 @@ def epilog(text):
def CMDhelp(parser, args):
"""Prints list of commands or help for a specific command."""
# This is the default help implementation. It can be disabled or overriden if
# This is the default help implementation. It can be disabled or overridden if
# wanted.
if not any(i in ('-h', '--help') for i in args):
args = args + ['--help']
......@@ -235,14 +235,14 @@ class CommandDispatcher(object):
if args:
if args[0] in ('-h', '--help') and len(args) > 1:
# Inverse the argument order so 'tool --help cmd' is rewritten to
# Reverse the argument order so 'tool --help cmd' is rewritten to
# 'tool cmd --help'.
args = [args[1], args[0]] + args[2:]
command = self.find_nearest_command(args[0])
if command:
if command.__name__ == 'CMDhelp' and len(args) > 1:
# Inverse the arguments order so 'tool help cmd' is rewritten to
# 'tool cmd --help'. Do it here since we want 'tool hel cmd' to work
# Reverse the argument order so 'tool help cmd' is rewritten to
# 'tool cmd --help'. Do it here since we want 'tool help cmd' to work
# too.
args = [args[1], '--help'] + args[2:]
command = self.find_nearest_command(args[0]) or command
......
......@@ -816,7 +816,7 @@ class FakeReposTestBase(trial_dir.TestCase):
"""Prints the diffs to ease debugging."""
self.assertEqual(expected.splitlines(), result.splitlines(), msg)
if expected != result:
# Strip the begining
# Strip the beginning
while expected and result and expected[0] == result[0]:
expected = expected[1:]
result = result[1:]
......
......@@ -176,7 +176,7 @@ mkdir -p "${rundir}/tmp"
ssh-keygen -t rsa -q -f "${rundir}/tmp/id_rsa" -N ""
ssh_public_key="$(cat ${rundir}/tmp/id_rsa.pub)"
# Set up the first user, with admin priveleges.
# Set up the first user, with admin privileges.
cat <<EOF | java -jar "$gerrit_exe" gsql -d "${rundir}" > /dev/null
INSERT INTO ACCOUNTS (FULL_NAME, MAXIMUM_PAGE_SIZE, PREFERRED_EMAIL, REGISTERED_ON, ACCOUNT_ID) VALUES ('${full_name}', ${maximum_page_size}, '${preferred_email}', '${registered_on}', ${account_id});
INSERT INTO ACCOUNT_EXTERNAL_IDS (ACCOUNT_ID, EXTERNAL_ID) VALUES (${account_id}, 'gerrit:${username}');
......
......@@ -252,7 +252,7 @@ class GitRepo(object):
...
}
The SPECIAL_KEYs are the following attribues of the GitRepo class:
The SPECIAL_KEYs are the following attributes of the GitRepo class:
* AUTHOR_NAME
* AUTHOR_EMAIL
* AUTHOR_DATE - must be a datetime.datetime instance
......
......@@ -128,7 +128,7 @@ class HasLuciContextLocalAuthTest(unittest.TestCase):
os.environ = {}
self.assertFalse(auth.has_luci_context_local_auth())
def testUnexistentPath(self):
def testNonexistentPath(self):
os.environ = {'LUCI_CONTEXT': 'path'}
open.side_effect = OSError
self.assertFalse(auth.has_luci_context_local_auth())
......
......@@ -820,7 +820,7 @@ class RevisionTest(unittest.TestCase):
def test_preserves_formatting(self):
before = [
'vars = {',
' # Some coment on deadbeef ',
' # Some comment on deadbeef ',
' "dep_revision": "deadbeef",',
'}',
'deps = {',
......@@ -833,7 +833,7 @@ class RevisionTest(unittest.TestCase):
]
after = [
'vars = {',
' # Some coment on deadbeef ',
' # Some comment on deadbeef ',
' "dep_revision": "deadfeed",',
'}',
'deps = {',
......
......@@ -171,7 +171,7 @@ class GClientSmokeGIT(gclient_smoketest_base.GClientSmokeBase):
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
# Manually remove git_hooked1 before synching to make sure it's not
# Manually remove git_hooked1 before syncing to make sure it's not
# recreated.
os.remove(join(self.root_dir, 'src', 'git_hooked1'))
......@@ -310,7 +310,7 @@ class GClientSmokeGIT(gclient_smoketest_base.GClientSmokeBase):
tree['src/git_hooked2'] = 'git_hooked2'
self.assertTree(tree)
# Manually remove git_hooked1 before synching to make sure it's not
# Manually remove git_hooked1 before syncing to make sure it's not
# recreated.
os.remove(join(self.root_dir, 'src', 'git_hooked1'))
......
......@@ -106,7 +106,7 @@ class GClientSmokeBase(fake_repos.FakeReposTestBase):
results.append([[match.group(1), match.group(2), match.group(3)]])
continue
match = re.match(r'^_____ (.*) is missing, synching instead$', line)
match = re.match(r'^_____ (.*) is missing, syncing instead$', line)
if match:
# Blah, it's when a dependency is deleted, we should probably not
# output this message.
......
......@@ -713,7 +713,7 @@ class GclientTest(trial_dir.TestCase):
This is what we mean to check here:
- |recursedeps| = [...] on 2 levels means we pull exactly 3 deps
(up to /fizz, but not /fuzz)
- pulling foo/bar with no recursion (in .gclient) is overriden by
- pulling foo/bar with no recursion (in .gclient) is overridden by
a later pull of foo/bar with recursion (in the dep tree)
- pulling foo/tar with no recursion (in .gclient) is not recursively
pulled (taz is left out)
......
......@@ -777,7 +777,7 @@ class TestGitCl(unittest.TestCase):
metrics_arguments.append('m')
if short_hostname == 'chromium':
# All reviwers and ccs get into ref_suffix.
# All reviewers and ccs get into ref_suffix.
for r in sorted(reviewers):
ref_suffix += ',r=%s' % r
metrics_arguments.append('r')
......@@ -3437,7 +3437,7 @@ class MakeRequestsHelperTestCase(unittest.TestCase):
])
def testMakeRequestsHelperCategorySet(self):
# The category property can be overriden with options.
# The category property can be overridden with options.
changelist = ChangelistMock(gerrit_change=self.exampleGerritChange())
jobs = [('chromium', 'try', 'my-builder')]
options = optparse.Values({'category': 'my-special-category'})
......
......@@ -42,7 +42,8 @@ class MetricsCollectorTest(unittest.TestCase):
self.FileWrite = mock.Mock()
self.FileRead = mock.Mock()
# So that we don't have to update the tests everytime we change the version.
# So that we don't have to update the tests every time we change the
# version.
mock.patch('metrics.metrics_utils.CURRENT_VERSION', 0).start()
mock.patch('metrics.urllib', self.urllib).start()
mock.patch('metrics.subprocess.Popen', self.Popen).start()
......
......@@ -30,7 +30,7 @@ class MyActivityTest(unittest.TestCase):
self.assertEqual(
(datetime(2020, 7, 1), datetime(2020, 10, 1)),
my_activity.get_quarter_of(datetime(2020, 9, 12)))
# Quarter range includes beggining
# Quarter range includes beginning
self.assertEqual(
(datetime(2020, 10, 1), datetime(2021, 1, 1)),
my_activity.get_quarter_of(datetime(2020, 10, 1)))
......@@ -46,7 +46,7 @@ class MyActivityTest(unittest.TestCase):
self.assertEqual(
(datetime(2020, 1, 1), datetime(2021, 1, 1)),
my_activity.get_year_of(datetime(2020, 9, 12)))
# Year range includes beggining
# Year range includes beginning
self.assertEqual(
(datetime(2020, 1, 1), datetime(2021, 1, 1)),
my_activity.get_year_of(datetime(2020, 1, 1)))
......
......@@ -370,7 +370,7 @@ class OwnersDatabaseTest(_BaseTestCase):
'',
darin,
'',
'# comment preceeded by empty line',
'# comment preceded by empty line',
'per-file bar.*=%s' % jochen,
john,
'',
......@@ -385,8 +385,8 @@ class OwnersDatabaseTest(_BaseTestCase):
self.assertEqual(db.comments, {
ben: {'': 'first comment'},
brett: {'': 'first comment inline comment'},
jochen: {'bar.*': 'comment preceeded by empty line'},
john: {'': 'comment preceeded by empty line'},
jochen: {'bar.*': 'comment preceded by empty line'},
john: {'': 'comment preceded by empty line'},
peter: {'': 'comment in the middle'}})
def test_owners_rooted_at_file(self):
......@@ -500,7 +500,7 @@ class ReviewersForTest(_BaseTestCase):
def test_reviewers_for__ignores_unowned_files(self):
# Clear the root OWNERS file.
self.files['/OWNERS'] = ''
self.assert_reviewers_for(['base/vlog.h', 'chrome/browser/deafults/h'],
self.assert_reviewers_for(['base/vlog.h', 'chrome/browser/defaults/h'],
[[brett]])
def test_reviewers_file_includes__absolute(self):
......
......@@ -1421,7 +1421,7 @@ class InputApiUnittest(PresubmitTestsBase):
'mychange', '', self.fake_root_dir, files, 0, 0, None)
input_api = presubmit.InputApi(
change, './PRESUBMIT.py', False, None, False)
# Sample usage of overiding the default white and black lists.
# Sample usage of overriding the default white and black lists.
got_files = input_api.AffectedSourceFiles(
lambda x: input_api.FilterSourceFile(x, white_list, black_list))
self.assertEqual(len(got_files), 2)
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment