Commit 867e5b53 authored by hinoka@google.com's avatar hinoka@google.com

Scripts to upload files to, and download files from, Google Storage based on sha1 sums

continuation of: https://codereview.chromium.org/11664024
Moved it from chrome/trunk/src/build to depot_tools/

BUG=153360
TEST=two unittests included in tests/
For end-to-end testing, check out a large directory.  Run
find . -name .svn -prune -o -size +1000k -type f -print0 | upload_to_google_storage.py -b chrome-artifacts -0 -
(replacing chrome-artifacts with an upload-able bucket)
to test upload

run "find . -name .svn -prune -o -size +1000k -type f -print0 | xargs -0 rm" to remove the files uploaded.  Check that the large binary files have been removed

run "download_from_google_storage.py -r -d -b chrome-artifacts ." to download the files again.

Review URL: https://chromiumcodereview.appspot.com/12042069

git-svn-id: svn://svn.chromium.org/chrome/trunk/tools/depot_tools@187951 0039d316-1c4b-4281-b951-d872f2087c98
parent 53c1e56c
This diff is collapsed.
This diff is collapsed.
e6c4fbd4fe7607f3e6ebf68b2ea4ef694da7b4fe
This is a test file.
This file exists in a subfolder
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
734d7c1ed3545383837428f031840a1e
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam,
quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo
consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse
cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non
proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
This is the second file.
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for upload_to_google_storage.py."""
import optparse
import os
import Queue
import shutil
import StringIO
import sys
import tempfile
import threading
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
import upload_to_google_storage
from download_from_google_storage_unittests import GsutilMock
# Path to the gsutil tool bundled with depot_tools, one directory above this
# tests/ directory: ../third_party/gsutil/gsutil
GSUTIL_DEFAULT_PATH = os.path.join(
    os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
    'third_party', 'gsutil', 'gsutil')
# Directory containing this test file; the 'gstools' fixture tree lives here.
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
class UploadTests(unittest.TestCase):
  """Unit tests for upload_to_google_storage.py.

  gsutil is replaced with GsutilMock, so no network traffic occurs; tests
  assert on the recorded gsutil command history instead.
  """

  def setUp(self):
    self.gsutil = GsutilMock(GSUTIL_DEFAULT_PATH)
    self.temp_dir = tempfile.mkdtemp(prefix='gstools_test')
    self.base_path = os.path.join(self.temp_dir, 'gstools')
    shutil.copytree(os.path.join(TEST_DIR, 'gstools'), self.base_path)
    self.base_url = 'gs://sometesturl'
    self.parser = optparse.OptionParser()
    self.ret_codes = Queue.Queue()
    self.stdout_queue = Queue.Queue()
    self.lorem_ipsum = os.path.join(self.base_path, 'lorem_ipsum.txt')
    self.lorem_ipsum_sha1 = '7871c8e24da15bad8b0be2c36edc9dc77e37727f'

  def tearDown(self):
    # BUG FIX: this hook was previously named cleanUp(), which unittest never
    # calls, so every test leaked its temporary directory and the
    # stdin-reading tests never restored sys.stdin.  tearDown() runs after
    # each test method.
    shutil.rmtree(self.temp_dir)
    sys.stdin = sys.__stdin__

  def test_upload_single_file(self):
    """A new file is hashed, uploaded, and gets a .sha1 sidecar written."""
    filenames = [self.lorem_ipsum]
    output_filename = '%s.sha1' % self.lorem_ipsum
    code = upload_to_google_storage.upload_to_google_storage(
        filenames, self.base_url, self.gsutil, True, False, 1, False)
    # force=True: an existence check followed by an unconditional copy.
    self.assertEqual(
        self.gsutil.history,
        [('check_call',
          ('ls', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1))),
         ('check_call',
          ('cp', '-q', filenames[0], '%s/%s' % (self.base_url,
                                                self.lorem_ipsum_sha1)))])
    self.assertTrue(os.path.exists(output_filename))
    self.assertEqual(
        open(output_filename, 'rb').read(),
        '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
    os.remove(output_filename)
    self.assertEqual(code, 0)

  def test_upload_single_file_remote_exists(self):
    """Upload is skipped when the remote ETag matches the local MD5."""
    filenames = [self.lorem_ipsum]
    output_filename = '%s.sha1' % self.lorem_ipsum
    etag_string = 'ETag: 634d7c1ed3545383837428f031840a1e'
    self.gsutil.add_expected(0, '', '')            # 'ls': the object exists.
    self.gsutil.add_expected(0, etag_string, '')   # 'ls -L': remote ETag.
    code = upload_to_google_storage.upload_to_google_storage(
        filenames, self.base_url, self.gsutil, False, False, 1, False)
    # No 'cp' entry: the MD5 comparison decided the upload was unnecessary.
    self.assertEqual(
        self.gsutil.history,
        [('check_call',
          ('ls', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1))),
         ('check_call',
          ('ls', '-L', '%s/%s' % (self.base_url, self.lorem_ipsum_sha1)))])
    self.assertTrue(os.path.exists(output_filename))
    self.assertEqual(
        open(output_filename, 'rb').read(),
        '7871c8e24da15bad8b0be2c36edc9dc77e37727f')
    os.remove(output_filename)
    self.assertEqual(code, 0)

  def test_upload_worker_errors(self):
    """A failing 'cp' surfaces its return code and stderr via ret_codes."""
    work_queue = Queue.Queue()
    work_queue.put((self.lorem_ipsum, self.lorem_ipsum_sha1))
    work_queue.put((None, None))  # Sentinel that terminates the worker loop.
    self.gsutil.add_expected(1, '', '')  # For the first ls call.
    self.gsutil.add_expected(20, '', 'Expected error message')
    # pylint: disable=W0212
    upload_to_google_storage._upload_worker(
        0,
        work_queue,
        self.base_url,
        self.gsutil,
        threading.Lock(),
        False,
        False,
        self.stdout_queue,
        self.ret_codes)
    expected_ret_codes = [
        (20,
         'Encountered error on uploading %s to %s/%s\nExpected error message' %
         (self.lorem_ipsum, self.base_url, self.lorem_ipsum_sha1))]
    self.assertEqual(list(self.ret_codes.queue), expected_ret_codes)

  def test_skip_hashing(self):
    """With skip_hashing, a pre-existing .sha1 file is trusted as-is."""
    filenames = [self.lorem_ipsum]
    output_filename = '%s.sha1' % self.lorem_ipsum
    fake_hash = '6871c8e24da15bad8b0be2c36edc9dc77e37727f'
    with open(output_filename, 'wb') as f:
      f.write(fake_hash)  # Fake hash.
    code = upload_to_google_storage.upload_to_google_storage(
        filenames, self.base_url, self.gsutil, False, False, 1, True)
    self.assertEqual(
        self.gsutil.history,
        [('check_call',
          ('ls', '%s/%s' % (self.base_url, fake_hash))),
         ('check_call',
          ('ls', '-L', '%s/%s' % (self.base_url, fake_hash))),
         ('check_call',
          ('cp', '-q', filenames[0], '%s/%s' % (self.base_url, fake_hash)))])
    # The fake hash must not have been overwritten by a recomputed one.
    self.assertEqual(
        open(output_filename, 'rb').read(), fake_hash)
    os.remove(output_filename)
    self.assertEqual(code, 0)

  def test_get_targets_no_args(self):
    """No targets at all is a usage error; optparse exits with code 2."""
    try:
      upload_to_google_storage.get_targets([], self.parser, False)
      self.fail()
    except SystemExit as e:  # 'as' syntax: valid on Python 2.6+ and 3.x.
      self.assertEqual(e.code, 2)

  def test_get_targets_passthrough(self):
    """Explicit filename arguments are returned unchanged."""
    result = upload_to_google_storage.get_targets(
        ['a', 'b', 'c', 'd', 'e'],
        self.parser,
        False)
    self.assertEqual(result, ['a', 'b', 'c', 'd', 'e'])

  def test_get_targets_multiple_stdin(self):
    """A lone '-' reads a newline-separated file list from stdin."""
    inputs = ['a', 'b', 'c', 'd', 'e']
    sys.stdin = StringIO.StringIO(os.linesep.join(inputs))
    result = upload_to_google_storage.get_targets(
        ['-'],
        self.parser,
        False)
    self.assertEqual(result, inputs)

  def test_get_targets_multiple_stdin_null(self):
    """'-' with use_null_terminator reads a NUL-separated list from stdin."""
    inputs = ['a', 'b', 'c', 'd', 'e']
    sys.stdin = StringIO.StringIO('\0'.join(inputs))
    result = upload_to_google_storage.get_targets(
        ['-'],
        self.parser,
        True)
    self.assertEqual(result, inputs)
# Allow running this file directly: python upload_to_google_storage_unittests.py
if __name__ == '__main__':
  unittest.main()
\ No newline at end of file
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Uploads files to Google Storage content addressed."""
import hashlib
import optparse
import os
import Queue
import re
import sys
import threading
import time
from download_from_google_storage import check_bucket_permissions
from download_from_google_storage import get_sha1
from download_from_google_storage import Gsutil
from download_from_google_storage import printer_worker
# Default location of the gsutil tool bundled with depot_tools:
# <this script's directory>/third_party/gsutil/gsutil
GSUTIL_DEFAULT_PATH = os.path.join(
    os.path.dirname(os.path.abspath(__file__)),
    'third_party', 'gsutil', 'gsutil')

# optparse usage text, shown by --help and by parser.error().
USAGE_STRING = """%prog [options] target [target2 ...].
Target is the file intended to be uploaded to Google Storage.
If target is "-", then a list of files will be taken from standard input
This script will generate a file (original filename).sha1 containing the
sha1 sum of the uploaded file.
It is recommended that the .sha1 file is checked into the repository,
the original file removed from the repository, and a hook added to the
DEPS file to call download_from_google_storage.py.
Example usages
--------------
Scan the current directory and upload all files larger than 1MB:
find . -name .svn -prune -o -size +1000k -type f -print0 | %prog -0 -b bkt -
(Replace "bkt" with the name of a writable bucket.)
"""
def get_md5(filename):
  """Return the hex MD5 digest of |filename|, read in 1 MiB chunks."""
  digest = hashlib.md5()
  with open(filename, 'rb') as f:
    # Stream the file so arbitrarily large inputs use constant memory.
    chunk = f.read(1024 * 1024)
    while chunk:
      digest.update(chunk)
      chunk = f.read(1024 * 1024)
  return digest.hexdigest()
def get_md5_cached(filename):
  """Return the MD5 of |filename|, using a '<filename>.md5' file as a cache.

  If the sidecar cache file exists and contains a well-formed 32-hex-digit
  digest, that digest is returned without rehashing the file.  Otherwise the
  digest is computed with get_md5(), written to the cache, and returned.

  BUG FIX: the original returned None when the .md5 file existed but was
  corrupt (contained no 32-hex-digit token); a corrupt cache is now
  recomputed and rewritten.  Files are opened in text mode ('r'/'w'), which
  is equivalent for this ASCII content and also works on Python 3.
  """
  md5_filename = '%s.md5' % filename
  # See if we can find an existing MD5 sum stored in a file.
  if os.path.exists(md5_filename):
    with open(md5_filename, 'r') as f:
      md5_match = re.search('([a-z0-9]{32})', f.read())
    if md5_match:
      return md5_match.group(1)
  # Cache missing or corrupt: compute the digest and (re)write the cache.
  md5_hash = get_md5(filename)
  with open(md5_filename, 'w') as f:
    f.write(md5_hash)
  return md5_hash
def _upload_worker(
thread_num, upload_queue, base_url, gsutil, md5_lock, force,
use_md5, stdout_queue, ret_codes):
while True:
filename, sha1_sum = upload_queue.get()
if not filename:
break
file_url = '%s/%s' % (base_url, sha1_sum)
if gsutil.check_call('ls', file_url)[0] == 0 and not force:
# File exists, check MD5 hash.
_, out, _ = gsutil.check_call('ls', '-L', file_url)
etag_match = re.search('ETag:\s+([a-z0-9]{32})', out)
if etag_match:
remote_md5 = etag_match.group(1)
# Calculate the MD5 checksum to match it to Google Storage's ETag.
with md5_lock:
if use_md5:
local_md5 = get_md5_cached(filename)
else:
local_md5 = get_md5(filename)
if local_md5 == remote_md5:
stdout_queue.put(
'%d> File %s already exists and MD5 matches, upload skipped' %
(thread_num, filename))
continue
stdout_queue.put('%d> Uploading %s...' % (
thread_num, filename))
code, _, err = gsutil.check_call('cp', '-q', filename, file_url)
if code != 0:
ret_codes.put(
(code,
'Encountered error on uploading %s to %s\n%s' %
(filename, file_url, err)))
continue
def get_targets(args, parser, use_null_terminator):
  """Resolve the positional arguments into the list of files to upload.

  A lone '-' argument means the file list is read from stdin, split on NUL
  bytes when |use_null_terminator| is set (for "find ... -print0" input) and
  on newlines otherwise.  Exits via parser.error() when no target was given.
  """
  if not args:
    parser.error('Missing target.')
  if args != ['-']:
    return args
  # Take stdin as a newline or null separated list of files.
  raw = sys.stdin.read()
  return raw.split('\0') if use_null_terminator else raw.splitlines()
def upload_to_google_storage(
    input_filenames, base_url, gsutil, force,
    use_md5, num_threads, skip_hashing):
  """Hash |input_filenames| and upload each to <base_url>/<sha1>.

  For every file a '<name>.sha1' sidecar is written (or, when |skip_hashing|
  is set, an existing sidecar is reused instead of rehashing).  Uploads are
  performed by |num_threads| worker threads sharing |gsutil|; a separate
  printer thread serializes progress output.

  Returns the highest return code reported by any worker (0 on success), or
  1 when a reused .sha1 sidecar does not contain a valid sha1 sum.
  """
  # We only want one MD5 calculation happening at a time to avoid HD thrashing.
  md5_lock = threading.Lock()

  # Start up all the worker threads plus the printer thread.
  all_threads = []
  ret_codes = Queue.Queue()
  ret_codes.put((0, None))  # Seed so the result scan below always has a row.
  upload_queue = Queue.Queue()
  upload_timer = time.time()
  stdout_queue = Queue.Queue()
  printer_thread = threading.Thread(target=printer_worker, args=[stdout_queue])
  printer_thread.daemon = True
  printer_thread.start()
  for thread_num in range(num_threads):
    t = threading.Thread(
        target=_upload_worker,
        args=[thread_num, upload_queue, base_url, gsutil, md5_lock,
              force, use_md5, stdout_queue, ret_codes])
    t.daemon = True
    t.start()
    all_threads.append(t)

  # We want to hash everything in a single thread since its faster.
  # The bottleneck is in disk IO, not CPU.
  hashing_start = time.time()
  for filename in input_filenames:
    if not os.path.exists(filename):
      stdout_queue.put('Main> Error: %s not found, skipping.' % filename)
      continue
    if os.path.exists('%s.sha1' % filename) and skip_hashing:
      # Trust the existing sidecar, but validate it looks like a sha1 sum.
      stdout_queue.put(
          'Main> Found hash for %s, sha1 calculation skipped.' % filename)
      with open(filename + '.sha1', 'rb') as f:
        sha1_file = f.read(1024)
      if not re.match('^([a-z0-9]{40})$', sha1_file):
        print >> sys.stderr, 'Invalid sha1 hash file %s.sha1' % filename
        return 1
      upload_queue.put((filename, sha1_file))
      continue
    stdout_queue.put('Main> Calculating hash for %s...' % filename)
    sha1_sum = get_sha1(filename)
    with open(filename + '.sha1', 'wb') as f:
      f.write(sha1_sum)
    stdout_queue.put('Main> Done calculating hash for %s.' % filename)
    upload_queue.put((filename, sha1_sum))
  hashing_duration = time.time() - hashing_start

  # Wait for everything to finish.
  for _ in all_threads:
    upload_queue.put((None, None))  # To mark the end of the work queue.
  for t in all_threads:
    t.join()
  stdout_queue.put(None)  # Sentinel telling the printer thread to exit.
  printer_thread.join()

  # Print timing information.
  print 'Hashing %s files took %1f seconds' % (
      len(input_filenames), hashing_duration)
  print 'Uploading took %1f seconds' % (time.time() - upload_timer)

  # See if we ran into any errors.
  max_ret_code = 0
  for ret_code, message in ret_codes.queue:
    max_ret_code = max(ret_code, max_ret_code)
    if message:
      print >> sys.stderr, message
  if not max_ret_code:
    print 'Success!'
  return max_ret_code
def main(args):
  """Command-line entry point.

  |args| is the full argv, program name included (i.e. sys.argv).  Returns
  the process exit code.
  """
  parser = optparse.OptionParser(USAGE_STRING)
  parser.add_option('-b', '--bucket',
                    help='Google Storage bucket to upload to.')
  parser.add_option('-e', '--boto', help='Specify a custom boto file.')
  parser.add_option('-f', '--force', action='store_true',
                    help='Force upload even if remote file exists.')
  parser.add_option('-g', '--gsutil_path', default=GSUTIL_DEFAULT_PATH,
                    help='Path to the gsutil script.')
  parser.add_option('-m', '--use_md5', action='store_true',
                    help='Generate MD5 files when scanning, and don\'t check '
                    'the MD5 checksum if a .md5 file is found.')
  parser.add_option('-t', '--num_threads', default=1, type='int',
                    help='Number of uploader threads to run.')
  parser.add_option('-s', '--skip_hashing', action='store_true',
                    help='Skip hashing if .sha1 file exists.')
  parser.add_option('-0', '--use_null_terminator', action='store_true',
                    help='Use \\0 instead of \\n when parsing '
                    'the file list from stdin. This is useful if the input '
                    'is coming from "find ... -print0".')
  # BUG FIX: parse the |args| parameter instead of implicitly re-reading
  # sys.argv, so main() is callable (and testable) with an explicit argv.
  # Behavior is unchanged for the only caller, main(sys.argv).
  (options, args) = parser.parse_args(args[1:])

  # Enumerate our inputs.
  input_filenames = get_targets(args, parser, options.use_null_terminator)

  # Make sure we can find a working instance of gsutil.
  if os.path.exists(GSUTIL_DEFAULT_PATH):
    gsutil = Gsutil(GSUTIL_DEFAULT_PATH)
  else:
    gsutil = None
    # Fall back to any gsutil found on PATH (last match wins).
    for path in os.environ["PATH"].split(os.pathsep):
      if os.path.exists(path) and 'gsutil' in os.listdir(path):
        gsutil = Gsutil(os.path.join(path, 'gsutil'))
  if not gsutil:
    parser.error('gsutil not found in %s, bad depot_tools checkout?' %
                 GSUTIL_DEFAULT_PATH)

  # Check we have a valid bucket with valid permissions.
  base_url, code = check_bucket_permissions(options.bucket, gsutil)
  if code:
    return code

  return upload_to_google_storage(
      input_filenames, base_url, gsutil, options.force, options.use_md5,
      options.num_threads, options.skip_hashing)


if __name__ == '__main__':
  sys.exit(main(sys.argv))
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment