Commit 14a83aec authored by Josip Sokcevic, committed by LUCI CQ

Reland "Use OS level locking in git_cache.py"

This is a reland of d3affaa6

Original change's description:
> Use OS level locking in git_cache.py
> 
> Without OS-level locking it's possible to leave "lock" files on disk,
> which will prevent the next run from acquiring those locks. This can
> easily happen if SIGKILL is issued.
> 
> R=apolito@google.com, ehmaldonado@chromium.org
> 
> Bug: 1049610
> Change-Id: Id87aa1376b9ea5ff0c2d14f3603636493ed1dd5b
> Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/2189333
> Reviewed-by: Edward Lesmes <ehmaldonado@chromium.org>
> Reviewed-by: Robbie Iannucci <iannucci@chromium.org>
> Commit-Queue: Josip Sokcevic <sokcevic@google.com>

Bug: 1049610
Change-Id: I58e65a10f7c779e0de1121ba7167c694996e390c
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/tools/depot_tools/+/2211189
Reviewed-by: Edward Lesmes <ehmaldonado@chromium.org>
Reviewed-by: Robbie Iannucci <iannucci@chromium.org>
Commit-Queue: Josip Sokcevic <sokcevic@google.com>
parent 8f6bfe30
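
For context: the point of the change above is that an OS-level file lock dies with the process that holds it, while an on-disk ".lock" sentinel does not. Below is a minimal POSIX-only sketch (illustrative, not part of this commit) showing that an advisory fcntl.flock lock is released by the kernel when its holder is SIGKILLed, so the next run can proceed, whereas a sentinel file created with O_CREAT | O_EXCL would stay behind and block it.

import fcntl
import os
import signal
import time


def hold_flock(path):
    # OS-level advisory lock tied to the open file descriptor; the kernel
    # releases it automatically when this process exits, even on SIGKILL.
    fd = os.open(path, os.O_CREAT | os.O_WRONLY, 0o644)
    fcntl.flock(fd, fcntl.LOCK_EX)
    time.sleep(60)  # pretend to work until we are killed


if __name__ == '__main__':
    lock_path = '/tmp/flock_demo.locked'
    pid = os.fork()
    if pid == 0:
        hold_flock(lock_path)
        os._exit(0)

    time.sleep(0.5)                # let the child acquire the lock
    os.kill(pid, signal.SIGKILL)   # simulate an abrupt kill
    os.waitpid(pid, 0)

    # The child's flock vanished with the child, so this non-blocking attempt
    # succeeds. A leftover sentinel file created with O_CREAT | O_EXCL (the
    # old Lockfile approach) would still exist here and block the next run.
    fd = os.open(lock_path, os.O_CREAT | os.O_WRONLY, 0o644)
    fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    print('lock acquired after the previous holder was SIGKILLed')
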
...@@ -1407,12 +1407,14 @@ The local checkout in %(checkout_path)s reports:
You should ensure that the URL listed in .gclient is correct and either change
it or fix the checkout.
''' % {'checkout_path': os.path.join(self.root_dir, dep.name),
''' % {
'checkout_path': os.path.join(self.root_dir, dep.name),
'expected_url': dep.url,
'expected_scm': dep.GetScmName(),
'mirror_string': mirror_string,
'actual_url': actual_url,
'actual_scm': dep.GetScmName()})
'actual_scm': dep.GetScmName()
})
def SetConfig(self, content):
assert not self.dependencies
...@@ -2687,13 +2689,12 @@ def CMDsync(parser, args):
parser.add_option('--no_bootstrap', '--no-bootstrap',
action='store_true',
help='Don\'t bootstrap from Google Storage.')
parser.add_option('--ignore_locks', action='store_true',
help='GIT ONLY - Ignore cache locks.')
parser.add_option('--break_repo_locks', action='store_true',
help='GIT ONLY - Forcibly remove repo locks (e.g. '
'index.lock). This should only be used if you know for '
'certain that this invocation of gclient is the only '
'thing operating on the git repos (e.g. on a bot).')
parser.add_option('--ignore_locks',
action='store_true',
help='No longer used.')
parser.add_option('--break_repo_locks',
action='store_true',
help='No longer used.')
parser.add_option('--lock_timeout', type='int', default=5000,
help='GIT ONLY - Deadline (in seconds) to wait for git '
'cache lock to become available. Default is %default.')
...@@ -2714,6 +2715,13 @@ def CMDsync(parser, args):
if not client:
raise gclient_utils.Error('client not configured; see \'gclient config\'')
if options.ignore_locks:
print('Warning: ignore_locks is no longer used. Please remove its usage.')
if options.break_repo_locks:
print('Warning: break_repo_locks is no longer used. Please remove its '
'usage.')
if options.revisions and options.head:
# TODO(maruel): Make it a parser.error if it doesn't break any builder.
print('Warning: you cannot use both --head and --revision')
...@@ -2784,12 +2792,14 @@ def CMDrevert(parser, args):
help='don\'t run pre-DEPS hooks', default=False)
parser.add_option('--upstream', action='store_true',
help='Make repo state match upstream branch.')
parser.add_option('--break_repo_locks', action='store_true',
help='GIT ONLY - Forcibly remove repo locks (e.g. '
'index.lock). This should only be used if you know for '
'certain that this invocation of gclient is the only '
'thing operating on the git repos (e.g. on a bot).')
parser.add_option('--break_repo_locks',
action='store_true',
help='No longer used.')
(options, args) = parser.parse_args(args)
if options.break_repo_locks:
print('Warning: break_repo_locks is no longer used. Please remove its ' +
'usage.')
# --force is implied.
options.force = True
options.reset = False
......
...@@ -982,9 +982,7 @@ class GitWrapper(SCMWrapper):
mirror.populate(verbose=options.verbose,
bootstrap=not getattr(options, 'no_bootstrap', False),
depth=depth,
ignore_lock=getattr(options, 'ignore_locks', False),
lock_timeout=getattr(options, 'lock_timeout', 0))
mirror.unlock()
def _Clone(self, revision, url, options):
"""Clone a git repository from the given URL.
......
...@@ -26,6 +26,7 @@ except ImportError: # For Py3 compatibility
from download_from_google_storage import Gsutil
import gclient_utils
import lockfile
import subcommand
# Analogous to gc.autopacklimit git config.
...@@ -40,9 +41,6 @@ except NameError:
class WinErr(Exception):
pass
class LockError(Exception):
pass
class ClobberNeeded(Exception):
pass
...@@ -82,116 +80,6 @@ def exponential_backoff_retry(fn, excs=(Exception,), name=None, count=10,
sleep_time *= 2
class Lockfile(object):
"""Class to represent a cross-platform process-specific lockfile."""
def __init__(self, path, timeout=0):
self.path = os.path.abspath(path)
self.timeout = timeout
self.lockfile = self.path + ".lock"
self.pid = os.getpid()
def _read_pid(self):
"""Read the pid stored in the lockfile.
Note: This method is potentially racy. By the time it returns the lockfile
may have been unlocked, removed, or stolen by some other process.
"""
try:
with open(self.lockfile, 'r') as f:
pid = int(f.readline().strip())
except (IOError, ValueError):
pid = None
return pid
def _make_lockfile(self):
"""Safely creates a lockfile containing the current pid."""
open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
fd = os.open(self.lockfile, open_flags, 0o644)
f = os.fdopen(fd, 'w')
print(self.pid, file=f)
f.close()
def _remove_lockfile(self):
"""Delete the lockfile. Complains (implicitly) if it doesn't exist.
See gclient_utils.py:rmtree docstring for more explanation on the
windows case.
"""
if sys.platform == 'win32':
lockfile = os.path.normcase(self.lockfile)
def delete():
exitcode = subprocess.call(['cmd.exe', '/c',
'del', '/f', '/q', lockfile])
if exitcode != 0:
raise LockError('Failed to remove lock: %s' % (lockfile,))
exponential_backoff_retry(
delete,
excs=(LockError,),
name='del [%s]' % (lockfile,))
else:
os.remove(self.lockfile)
def lock(self):
"""Acquire the lock.
This will block with a deadline of self.timeout seconds.
"""
elapsed = 0
while True:
try:
self._make_lockfile()
return
except OSError as e:
if elapsed < self.timeout:
sleep_time = max(10, min(3, self.timeout - elapsed))
logging.info('Could not create git cache lockfile; '
'will retry after sleep(%d).', sleep_time);
elapsed += sleep_time
time.sleep(sleep_time)
continue
if e.errno == errno.EEXIST:
raise LockError("%s is already locked" % self.path)
else:
raise LockError("Failed to create %s (err %s)" % (self.path, e.errno))
def unlock(self):
"""Release the lock."""
try:
if not self.is_locked():
raise LockError("%s is not locked" % self.path)
if not self.i_am_locking():
raise LockError("%s is locked, but not by me" % self.path)
self._remove_lockfile()
except WinErr:
# Windows is unreliable when it comes to file locking. YMMV.
pass
def break_lock(self):
"""Remove the lock, even if it was created by someone else."""
try:
self._remove_lockfile()
return True
except OSError as exc:
if exc.errno == errno.ENOENT:
return False
else:
raise
def is_locked(self):
"""Test if the file is locked by anyone.
Note: This method is potentially racy. By the time it returns the lockfile
may have been unlocked, removed, or stolen by some other process.
"""
return os.path.exists(self.lockfile)
def i_am_locking(self):
"""Test if the file is locked by this process."""
return self.is_locked() and self.pid == self._read_pid()
class Mirror(object):
git_exe = 'git.bat' if sys.platform.startswith('win') else 'git'
...@@ -568,7 +456,6 @@ class Mirror(object):
shallow=False,
bootstrap=False,
verbose=False,
ignore_lock=False,
lock_timeout=0,
reset_fetch_config=False):
assert self.GetCachePath()
...@@ -576,10 +463,7 @@ class Mirror(object):
depth = 10000
gclient_utils.safe_makedirs(self.GetCachePath())
lockfile = Lockfile(self.mirror_path, lock_timeout)
if not ignore_lock:
lockfile.lock()
with lockfile.lock(self.mirror_path, lock_timeout):
try:
self._ensure_bootstrapped(depth, bootstrap, reset_fetch_config)
self._fetch(self.mirror_path, verbose, depth, no_fetch_tags,
...@@ -588,13 +472,12 @@ class Mirror(object):
# This is a major failure, we need to clean and force a bootstrap.
gclient_utils.rmtree(self.mirror_path)
self.print(GIT_CACHE_CORRUPT_MESSAGE)
self._ensure_bootstrapped(
depth, bootstrap, reset_fetch_config, force=True)
self._ensure_bootstrapped(depth,
bootstrap,
reset_fetch_config,
force=True)
self._fetch(self.mirror_path, verbose, depth, no_fetch_tags,
reset_fetch_config)
finally:
if not ignore_lock:
lockfile.unlock()
def update_bootstrap(self, prune=False, gc_aggressive=False):
# The folder is <git number>
...@@ -665,45 +548,6 @@ class Mirror(object):
except OSError:
logging.warn('Unable to delete temporary pack file %s' % f)
@classmethod
def BreakLocks(cls, path):
did_unlock = False
lf = Lockfile(path)
if lf.break_lock():
did_unlock = True
# Look for lock files that might have been left behind by an interrupted
# git process.
lf = os.path.join(path, 'config.lock')
if os.path.exists(lf):
os.remove(lf)
did_unlock = True
cls.DeleteTmpPackFiles(path)
return did_unlock
def unlock(self):
return self.BreakLocks(self.mirror_path)
@classmethod
def UnlockAll(cls):
cachepath = cls.GetCachePath()
if not cachepath:
return
dirlist = os.listdir(cachepath)
repo_dirs = set([os.path.join(cachepath, path) for path in dirlist
if os.path.isdir(os.path.join(cachepath, path))])
for dirent in dirlist:
if dirent.startswith('_cache_tmp') or dirent.startswith('tmp'):
gclient_utils.rm_file_or_tree(os.path.join(cachepath, dirent))
elif (dirent.endswith('.lock') and
os.path.isfile(os.path.join(cachepath, dirent))):
repo_dirs.add(os.path.join(cachepath, dirent[:-5]))
unlocked_repos = []
for repo_dir in repo_dirs:
if cls.BreakLocks(repo_dir):
unlocked_repos.append(repo_dir)
return unlocked_repos
@subcommand.usage('[url of repo to check for caching]')
def CMDexists(parser, args):
...@@ -768,9 +612,10 @@ def CMDpopulate(parser, args):
parser.add_option('--no_bootstrap', '--no-bootstrap',
action='store_true',
help='Don\'t bootstrap from Google Storage')
parser.add_option('--ignore_locks', '--ignore-locks',
parser.add_option('--ignore_locks',
'--ignore-locks',
action='store_true',
help='Don\'t try to lock repository')
help='NOOP. This flag will be removed in the future.')
parser.add_option('--break-locks',
action='store_true',
help='Break any existing lock instead of just ignoring it')
...@@ -780,17 +625,18 @@ def CMDpopulate(parser, args):
options, args = parser.parse_args(args)
if not len(args) == 1:
parser.error('git cache populate only takes exactly one repo url.')
if options.ignore_locks:
print('ignore_locks is no longer used. Please remove its usage.')
if options.break_locks:
print('break_locks is no longer used. Please remove its usage.')
url = args[0]
mirror = Mirror(url, refs=options.ref)
if options.break_locks:
mirror.unlock()
kwargs = {
'no_fetch_tags': options.no_fetch_tags,
'verbose': options.verbose,
'shallow': options.shallow,
'bootstrap': not options.no_bootstrap,
'ignore_lock': options.ignore_locks,
'lock_timeout': options.timeout,
'reset_fetch_config': options.reset_fetch_config,
}
...@@ -864,37 +710,10 @@ def CMDfetch(parser, args):
return 0
@subcommand.usage('[url of repo to unlock, or -a|--all]')
@subcommand.usage('do not use - it is a noop.')
def CMDunlock(parser, args):
"""This command does nothing."""
print('This command does nothing and will be removed in the future.')
"""Unlock one or all repos if their lock files are still around."""
parser.add_option('--force', '-f', action='store_true',
help='Actually perform the action')
parser.add_option('--all', '-a', action='store_true',
help='Unlock all repository caches')
options, args = parser.parse_args(args)
if len(args) > 1 or (len(args) == 0 and not options.all):
parser.error('git cache unlock takes exactly one repo url, or --all')
if not options.force:
cachepath = Mirror.GetCachePath()
lockfiles = [os.path.join(cachepath, path)
for path in os.listdir(cachepath)
if path.endswith('.lock') and os.path.isfile(path)]
parser.error('git cache unlock requires -f|--force to do anything. '
'Refusing to unlock the following repo caches: '
', '.join(lockfiles))
unlocked_repos = []
if options.all:
unlocked_repos.extend(Mirror.UnlockAll())
else:
m = Mirror(args[0])
if m.unlock():
unlocked_repos.append(m.mirror_path)
if unlocked_repos:
logging.info('Broke locks on these caches:\n %s' % '\n '.join(
unlocked_repos))
class OptionParser(optparse.OptionParser):
......
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Exclusive filelocking for all supported platforms."""
from __future__ import print_function
import contextlib
import logging
import os
import sys
import time
class LockError(Exception):
pass
if sys.platform.startswith('win'):
# Windows implementation
import win32imports
BYTES_TO_LOCK = 1
def _open_file(lockfile):
return win32imports.Handle(
win32imports.CreateFileW(
lockfile, # lpFileName
win32imports.GENERIC_WRITE, # dwDesiredAccess
0, # dwShareMode=prevent others from opening file
None, # lpSecurityAttributes
win32imports.CREATE_ALWAYS, # dwCreationDisposition
win32imports.FILE_ATTRIBUTE_NORMAL, # dwFlagsAndAttributes
None # hTemplateFile
))
def _close_file(handle):
# CloseHandle releases lock too.
win32imports.CloseHandle(handle)
def _lock_file(handle):
ret = win32imports.LockFileEx(
handle, # hFile
win32imports.LOCKFILE_FAIL_IMMEDIATELY
| win32imports.LOCKFILE_EXCLUSIVE_LOCK, # dwFlags
0, #dwReserved
BYTES_TO_LOCK, # nNumberOfBytesToLockLow
0, # nNumberOfBytesToLockHigh
win32imports.Overlapped() # lpOverlapped
)
# LockFileEx returns result as bool, which is converted into an integer
# (1 == successful; 0 == not successful)
if ret == 0:
error_code = win32imports.GetLastError()
raise OSError('Failed to lock handle (error code: %d).' % error_code)
else:
# Unix implementation
import fcntl
def _open_file(lockfile):
open_flags = (os.O_CREAT | os.O_WRONLY)
return os.open(lockfile, open_flags, 0o644)
def _close_file(fd):
os.close(fd)
def _lock_file(fd):
fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
def _try_lock(lockfile):
f = _open_file(lockfile)
try:
_lock_file(f)
except Exception:
_close_file(f)
raise
return lambda: _close_file(f)
def _lock(path, timeout=0):
"""_lock returns function to release the lock if locking was successful.
_lock also implements simple retry logic."""
elapsed = 0
while True:
try:
return _try_lock(path + '.locked')
except (OSError, IOError) as e:
if elapsed < timeout:
sleep_time = min(10, timeout - elapsed)
logging.info(
'Could not create git cache lockfile; '
'will retry after sleep(%d).', sleep_time)
elapsed += sleep_time
time.sleep(sleep_time)
continue
raise LockError("Error locking %s (err: %s)" % (path, str(e)))
@contextlib.contextmanager
def lock(path, timeout=0):
"""Get exclusive lock to path.
Usage:
import lockfile
with lockfile.lock(path, timeout):
# Do something
pass
"""
release_fn = _lock(path, timeout)
try:
yield
finally:
release_fn()
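
A brief usage sketch of the new module (hypothetical caller; populate_with_lock and do_fetch are illustrative names, not depot_tools APIs): callers such as Mirror.populate wrap their critical section in lockfile.lock and see contention as LockError once the timeout expires.

import lockfile  # the new depot_tools module introduced by this change


def do_fetch(path):
    # Illustrative placeholder for the real cache-update work.
    print('updating cache at %s' % path)


def populate_with_lock(mirror_path, lock_timeout=0):
    try:
        # lockfile.lock blocks for up to `lock_timeout` seconds, sleeping and
        # retrying between attempts, then raises LockError if the lock is
        # still held by another process.
        with lockfile.lock(mirror_path, timeout=lock_timeout):
            do_fetch(mirror_path)
    except lockfile.LockError as e:
        print('git cache is busy: %s' % e)
        raise
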
...@@ -5,6 +5,7 @@
"""Unit tests for git_cache.py"""
import logging
import os
import shutil
import subprocess
...@@ -245,6 +246,8 @@ class MirrorTest(unittest.TestCase):
if __name__ == '__main__':
logging.basicConfig(
level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
sys.exit(coverage_utils.covered_main((
os.path.join(DEPOT_TOOLS_ROOT, 'git_cache.py')
), required_percentage=0))
#!/usr/bin/env vpython3
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for lockfile.py"""
import logging
import os
import shutil
import sys
import tempfile
import threading
import unittest
if sys.version_info.major == 2:
import mock
import Queue
else:
from unittest import mock
import queue as Queue
DEPOT_TOOLS_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, DEPOT_TOOLS_ROOT)
from testing_support import coverage_utils
import lockfile
class LockTest(unittest.TestCase):
def setUp(self):
self.cache_dir = tempfile.mkdtemp(prefix='lockfile')
self.addCleanup(shutil.rmtree, self.cache_dir, ignore_errors=True)
def testLock(self):
with lockfile.lock(self.cache_dir):
# cached dir locked, attempt to lock it again
with self.assertRaises(lockfile.LockError):
with lockfile.lock(self.cache_dir):
pass
with lockfile.lock(self.cache_dir):
pass
@mock.patch('time.sleep')
def testLockConcurrent(self, sleep_mock):
'''testLockConcurrent simulates what happens when two separate processes try
to acquire the same file lock with timeout.'''
# Queues q_f1 and q_sleep are used to control execution of individual
# threads.
q_f1 = Queue.Queue()
q_sleep = Queue.Queue()
results = Queue.Queue()
def side_effect(arg):
'''side_effect is called while lockfile.lock is blocked and retrying. In this
unit test, the blocked call comes from f2.'''
logging.debug('sleep: started')
q_sleep.put(True)
logging.debug('sleep: waiting for q_sleep to be consumed')
q_sleep.join()
logging.debug('sleep: waiting for result before exiting')
results.get(timeout=1)
logging.debug('sleep: exiting')
sleep_mock.side_effect = side_effect
def f1():
'''f1 enters lockfile.lock first (controlled via q_f1). It then waits for
side_effect to put a message in queue q_sleep.'''
logging.debug('f1 started, locking')
with lockfile.lock(self.cache_dir, timeout=1):
logging.debug('f1: locked')
q_f1.put(True)
logging.debug('f1: waiting on q_f1 to be consumed')
q_f1.join()
logging.debug('f1: done waiting on q_f1, getting q_sleep')
q_sleep.get(timeout=1)
results.put(True)
logging.debug('f1: lock released')
q_sleep.task_done()
logging.debug('f1: exiting')
def f2():
'''f2 enters lockfile.lock second (controlled by q_f1).'''
logging.debug('f2: started, consuming q_f1')
q_f1.get(timeout=1) # wait for f1 to execute lock
q_f1.task_done()
logging.debug('f2: done waiting for q_f1, locking')
with lockfile.lock(self.cache_dir, timeout=1):
logging.debug('f2: locked')
results.put(True)
t1 = threading.Thread(target=f1)
t1.start()
t2 = threading.Thread(target=f2)
t2.start()
t1.join()
t2.join()
# One result was consumed by side_effect, we expect only one in the queue.
self.assertEqual(1, results.qsize())
sleep_mock.assert_called_once_with(1)
if __name__ == '__main__':
logging.basicConfig(
level=logging.DEBUG if '-v' in sys.argv else logging.ERROR)
sys.exit(
coverage_utils.covered_main(
(os.path.join(DEPOT_TOOLS_ROOT, 'git_cache.py')),
required_percentage=0))
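
As a supplementary sketch (it assumes it is run from a depot_tools checkout so that lockfile is importable; hold_lock and the paths are illustrative), the same lock can also be exercised across real processes rather than mocked threads: a child process holds the lock while the parent's zero-timeout attempt fails with LockError.

import multiprocessing
import os
import shutil
import tempfile
import time

import lockfile


def hold_lock(path, seconds):
    # Child process: grab the lock and sit on it for a while.
    with lockfile.lock(path):
        time.sleep(seconds)


if __name__ == '__main__':
    tmp_dir = tempfile.mkdtemp(prefix='lockdemo')
    path = os.path.join(tmp_dir, 'cache')  # lockfile creates 'cache.locked'
    child = multiprocessing.Process(target=hold_lock, args=(path, 5))
    child.start()
    time.sleep(1)  # give the child time to acquire the lock
    try:
        with lockfile.lock(path):  # timeout=0, so contention fails at once
            pass
    except lockfile.LockError as e:
        print('expected contention: %s' % e)
    finally:
        child.join()
        shutil.rmtree(tmp_dir, ignore_errors=True)
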
# Copyright 2020 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Win32 functions and constants."""
import ctypes
import ctypes.wintypes
GENERIC_WRITE = 0x40000000
CREATE_ALWAYS = 0x00000002
FILE_ATTRIBUTE_NORMAL = 0x00000080
LOCKFILE_EXCLUSIVE_LOCK = 0x00000002
LOCKFILE_FAIL_IMMEDIATELY = 0x00000001
class Overlapped(ctypes.Structure):
"""Overlapped is required and used in LockFileEx and UnlockFileEx."""
_fields_ = [('Internal', ctypes.wintypes.LPVOID),
('InternalHigh', ctypes.wintypes.LPVOID),
('Offset', ctypes.wintypes.DWORD),
('OffsetHigh', ctypes.wintypes.DWORD),
('Pointer', ctypes.wintypes.LPVOID),
('hEvent', ctypes.wintypes.HANDLE)]
# https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-createfilew
CreateFileW = ctypes.windll.kernel32.CreateFileW
CreateFileW.argtypes = [
ctypes.wintypes.LPCWSTR, # lpFileName
ctypes.wintypes.DWORD, # dwDesiredAccess
ctypes.wintypes.DWORD, # dwShareMode
ctypes.wintypes.LPVOID, # lpSecurityAttributes
ctypes.wintypes.DWORD, # dwCreationDisposition
ctypes.wintypes.DWORD, # dwFlagsAndAttributes
ctypes.wintypes.LPVOID, # hTemplateFile
]
CreateFileW.restype = ctypes.wintypes.HANDLE
# https://docs.microsoft.com/en-us/windows/win32/api/handleapi/nf-handleapi-closehandle
CloseHandle = ctypes.windll.kernel32.CloseHandle
CloseHandle.argtypes = [
ctypes.wintypes.HANDLE, # hFile
]
CloseHandle.restype = ctypes.wintypes.BOOL
# https://docs.microsoft.com/en-us/windows/win32/api/fileapi/nf-fileapi-lockfileex
LockFileEx = ctypes.windll.kernel32.LockFileEx
LockFileEx.argtypes = [
ctypes.wintypes.HANDLE, # hFile
ctypes.wintypes.DWORD, # dwFlags
ctypes.wintypes.DWORD, # dwReserved
ctypes.wintypes.DWORD, # nNumberOfBytesToLockLow
ctypes.wintypes.DWORD, # nNumberOfBytesToLockHigh
ctypes.POINTER(Overlapped), # lpOverlapped
]
LockFileEx.restype = ctypes.wintypes.BOOL
# Commonly used functions are listed here so callers don't need to import
# ctypes.
GetLastError = ctypes.GetLastError
Handle = ctypes.wintypes.HANDLE