depot_tools: import bot_update gclient git rietveld tryserver recipe modules

BUG=582074

Review URL: https://codereview.chromium.org/1642023002

git-svn-id: svn://svn.chromium.org/chrome/trunk/tools/depot_tools@298447 0039d316-1c4b-4281-b951-d872f2087c98
parent d2ef7086
DEPS = [
'gclient',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io',
'rietveld',
'recipe_engine/step',
'tryserver',
]
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Recipe module to ensure a checkout is consistant on a bot."""
from recipe_engine import recipe_api
# This is just for testing, to indicate if a master is using a Git scheduler
# or not.
SVN_MASTERS = (
'experimental.svn',
)
def jsonish_to_python(spec, is_top=False):
"""Turn a json spec into a python parsable object.
This exists because gclient specs, while resembling JSON, are actually
ingested using a Python eval(). Therefore a bit of plumbing is required
to turn our newly constructed gclient spec into a gclient-readable spec.
"""
ret = ''
if is_top: # We're the 'top' level, so treat this dict as a suite.
ret = '\n'.join(
'%s = %s' % (k, jsonish_to_python(spec[k])) for k in sorted(spec)
)
else:
if isinstance(spec, dict):
ret += '{'
ret += ', '.join(
"%s: %s" % (repr(str(k)), jsonish_to_python(spec[k]))
for k in sorted(spec)
)
ret += '}'
elif isinstance(spec, list):
ret += '['
ret += ', '.join(jsonish_to_python(x) for x in spec)
ret += ']'
elif isinstance(spec, basestring):
ret = repr(str(spec))
else:
ret = repr(spec)
return ret
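# Illustrative sketch (the URL below is made up for this example): a spec dict
# like
#   {'cache_dir': None,
#    'solutions': [{'name': 'src', 'url': 'https://host.example/src.git'}]}
# is rendered by jsonish_to_python(spec, is_top=True) as the string
#   cache_dir = None
#   solutions = [{'name': 'src', 'url': 'https://host.example/src.git'}]
# which is exactly the kind of text bot_update later hands to gclient via
# --spec (compare the "--spec" values in the expectation files below).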
class BotUpdateApi(recipe_api.RecipeApi):
def __init__(self, *args, **kwargs):
self._properties = {}
super(BotUpdateApi, self).__init__(*args, **kwargs)
def __call__(self, name, cmd, **kwargs):
"""Wrapper for easy calling of bot_update."""
assert isinstance(cmd, (list, tuple))
bot_update_path = self.resource('bot_update.py')
kwargs.setdefault('infra_step', True)
return self.m.python(name, bot_update_path, cmd, **kwargs)
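# Note on __call__: a call like self('bot_update', cmd) runs
# "python -u <resources/bot_update.py> <cmd...>" as an infra step, which is
# why the expectation files below show commands starting with
# "RECIPE_MODULE[bot_update]/resources/bot_update.py".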
@property
def properties(self):
return self._properties
def ensure_checkout(self, gclient_config=None, suffix=None,
patch=True, update_presentation=True,
force=False, patch_root=None, no_shallow=False,
with_branch_heads=False, refs=None,
patch_project_roots=None, patch_oauth2=False,
output_manifest=True, clobber=False,
root_solution_revision=None, **kwargs):
refs = refs or []
# We can re-use the gclient spec from the gclient module, since all the
# data bot_update needs is already configured into the gclient spec.
cfg = gclient_config or self.m.gclient.c
spec_string = jsonish_to_python(cfg.as_jsonish(), True)
# Used by bot_update to determine if we want to run or not.
master = self.m.properties['mastername']
builder = self.m.properties['buildername']
slave = self.m.properties['slavename']
# Construct our bot_update command. This is basically inclusive of
# everything bot_update needs to know:
root = patch_root
if root is None:
root = cfg.solutions[0].name
additional = self.m.rietveld.calculate_issue_root(patch_project_roots)
if additional:
root = self.m.path.join(root, additional)
if patch:
issue = self.m.properties.get('issue')
patchset = self.m.properties.get('patchset')
patch_url = self.m.properties.get('patch_url')
gerrit_repo = self.m.properties.get('repository')
gerrit_ref = self.m.properties.get('event.patchSet.ref')
else:
# The trybot recipe sometimes wants to de-apply the patch, in which case
# we pretend the issue/patchset/patch_url never existed.
issue = patchset = patch_url = email_file = key_file = None
gerrit_repo = gerrit_ref = None
# Issue and patchset must come together.
if issue:
assert patchset
if patchset:
assert issue
if patch_url:
# If patch_url is present, bot_update will actually ignore issue/ps.
issue = patchset = None
# The gerrit_ref and gerrit_repo must be together or not at all. If one is
# missing, clear both of them.
if not gerrit_ref or not gerrit_repo:
gerrit_repo = gerrit_ref = None
assert (gerrit_ref != None) == (gerrit_repo != None)
# Point to the oauth2 auth files if specified.
# These paths are where the bots put their credential files.
if patch_oauth2:
email_file = self.m.path['build'].join(
'site_config', '.rietveld_client_email')
key_file = self.m.path['build'].join(
'site_config', '.rietveld_secret_key')
else:
email_file = key_file = None
rev_map = {}
if self.m.gclient.c:
rev_map = self.m.gclient.c.got_revision_mapping.as_jsonish()
flags = [
# 1. Do we want to run? (master/builder/slave).
['--master', master],
['--builder', builder],
['--slave', slave],
# 2. What do we want to check out (spec/root/rev/rev_map).
['--spec', spec_string],
['--root', root],
['--revision_mapping_file', self.m.json.input(rev_map)],
# 3. How to find the patch, if any (issue/patchset/patch_url).
['--issue', issue],
['--patchset', patchset],
['--patch_url', patch_url],
['--rietveld_server', self.m.properties.get('rietveld')],
['--gerrit_repo', gerrit_repo],
['--gerrit_ref', gerrit_ref],
['--apply_issue_email_file', email_file],
['--apply_issue_key_file', key_file],
# 4. Hookups to JSON output back into recipes.
['--output_json', self.m.json.output()],]
# Collect all fixed revisions to simulate them in the json output.
# Fixed revisions are the explicit input revisions of bot_update.py, i.e.
# every command line parameter "--revision name@value".
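# For example (illustrative, mirroring the tryjob_v8 expectation below): with
# solutions 'src' and 'src/v8' and revisions {'src': 'HEAD', 'src/v8': 'abc'},
# bot_update ends up invoked with "--revision src@HEAD --revision src/v8@abc".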
fixed_revisions = {}
revisions = {}
for solution in cfg.solutions:
if solution.revision:
revisions[solution.name] = solution.revision
elif solution == cfg.solutions[0]:
revisions[solution.name] = (
self.m.properties.get('parent_got_revision') or
self.m.properties.get('revision') or
'HEAD')
if self.m.gclient.c and self.m.gclient.c.revisions:
revisions.update(self.m.gclient.c.revisions)
if cfg.solutions and root_solution_revision:
revisions[cfg.solutions[0].name] = root_solution_revision
# Allow for overrides required to bisect into rolls.
revisions.update(self.m.properties.get('deps_revision_overrides', {}))
for name, revision in sorted(revisions.items()):
fixed_revision = self.m.gclient.resolve_revision(revision)
if fixed_revision:
fixed_revisions[name] = fixed_revision
flags.append(['--revision', '%s@%s' % (name, fixed_revision)])
# Add extra fetch refspecs.
for ref in refs:
flags.append(['--refs', ref])
# Filter out flags that are None.
cmd = [item for flag_set in flags
for item in flag_set if flag_set[1] is not None]
if clobber:
cmd.append('--clobber')
if force:
cmd.append('--force')
if no_shallow:
cmd.append('--no_shallow')
if output_manifest:
cmd.append('--output_manifest')
if with_branch_heads or cfg.with_branch_heads:
cmd.append('--with_branch_heads')
# Inject Json output for testing.
git_mode = self.m.properties.get('mastername') not in SVN_MASTERS
first_sln = cfg.solutions[0].name
step_test_data = lambda: self.test_api.output_json(
master, builder, slave, root, first_sln, rev_map, git_mode, force,
self.m.properties.get('fail_patch', False),
output_manifest=output_manifest, fixed_revisions=fixed_revisions)
# Add suffixes to the step name, if specified.
name = 'bot_update'
if not patch:
name += ' (without patch)'
if suffix:
name += ' - %s' % suffix
# Ah hah! Now that everything is in place, let's run bot_update!
try:
# 87 and 88 are the 'patch failure' codes for patch download and patch
# apply, respectively. We don't actually use the error codes, and instead
# rely on the emitted JSON to determine the cause of failure.
self(name, cmd, step_test_data=step_test_data,
ok_ret=(0, 87, 88), **kwargs)
finally:
step_result = self.m.step.active_result
self._properties = step_result.json.output.get('properties', {})
if update_presentation:
# Set properties such as got_revision.
for prop_name, prop_value in self.properties.iteritems():
step_result.presentation.properties[prop_name] = prop_value
# Add helpful step description in the step UI.
if 'step_text' in step_result.json.output:
step_text = step_result.json.output['step_text']
step_result.presentation.step_text = step_text
# Add log line output.
if 'log_lines' in step_result.json.output:
for log_name, log_lines in step_result.json.output['log_lines']:
step_result.presentation.logs[log_name] = log_lines.splitlines()
# Set the "checkout" path for the main solution.
# This is used by the Chromium module to figure out where to look for
# the checkout.
# If there is a patch failure, emit another step that says things failed.
if step_result.json.output.get('patch_failure'):
return_code = step_result.json.output.get('patch_apply_return_code')
if return_code == 3:
# This is a download failure, hence an infra failure.
# Sadly, python.failing_step doesn't support kwargs.
self.m.python.inline(
'Patch failure',
('import sys;'
'print "Patch download failed. See bot_update step for details";'
'sys.exit(1)'),
infra_step=True,
step_test_data=lambda: self.m.raw_io.test_api.output(
'Patch download failed. See bot_update step for details',
retcode=1)
)
else:
# This is an actual patch failure.
self.m.tryserver.set_patch_failure_tryjob_result()
self.m.python.failing_step(
'Patch failure', 'Check the bot_update step for details')
# bot_update actually just sets root to be the folder name of the
# first solution.
if step_result.json.output['did_run']:
co_root = step_result.json.output['root']
cwd = kwargs.get('cwd', self.m.path['slave_build'])
if 'checkout' not in self.m.path:
self.m.path['checkout'] = cwd.join(*co_root.split(self.m.path.sep))
return step_result
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"chromium.linux",
"--builder",
"Linux Builder",
"--slave",
"totallyaslave-m1",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@abc"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update (without patch)",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"abc\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"chromium.linux",
"--builder",
"Linux Builder",
"--slave",
"totallyaslave-m1",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD",
"--output_manifest"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"manifest\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"repository\": \"https://fake.org/src.git\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"chromium.linux",
"--builder",
"Linux Builder",
"--slave",
"totallyaslave-m1",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD",
"--with_branch_heads"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update - with branch heads",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"experimental",
"--builder",
"Experimental Builder",
"--slave",
"somehost",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD",
"--clobber"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"experimental",
"--builder",
"Experimental Builder",
"--slave",
"somehost",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD",
"--force"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"experimental",
"--builder",
"Experimental Builder",
"--slave",
"somehost",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD",
"--no_shallow"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"experimental",
"--builder",
"Experimental Builder",
"--slave",
"somehost",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"experimental",
"--builder",
"Experimental Builder",
"--slave",
"somehost",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@revision"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"experimental.svn",
"--builder",
"Experimental SVN Builder",
"--slave",
"somehost",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD",
"--force"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": 170242, @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_git\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@170242@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_git@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"tryserver.chromium.linux",
"--builder",
"linux_rel",
"--slave",
"totallyaslave-c4",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD",
"--refs",
"+refs/change/1/2/333"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"tryserver.chromium.linux",
"--builder",
"linux_rel",
"--slave",
"totallyaslave-c4",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--apply_issue_email_file",
"[BUILD]/site_config/.rietveld_client_email",
"--apply_issue_key_file",
"[BUILD]/site_config/.rietveld_secret_key",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"tryserver.chromium.linux",
"--builder",
"linux_rel",
"--slave",
"totallyaslave-c4",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--patch_url",
"http://src.chromium.org/foo/bar",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"tryserver.chromium.linux",
"--builder",
"linux_rel",
"--slave",
"totallyaslave-c4",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--patch_url",
"http://src.chromium.org/foo/bar",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@STEP_EXCEPTION@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"name": "$result",
"reason": "Infra Failure: Step('bot_update') returned 1",
"status_code": 1
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"tryserver.chromium.linux",
"--builder",
"linux_rel",
"--slave",
"totallyaslave-c4",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--patch_url",
"http://src.chromium.org/foo/bar",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"log_lines\": [@@@",
"@@@STEP_LOG_LINE@json.output@ [@@@",
"@@@STEP_LOG_LINE@json.output@ \"patch error\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"Patch failed to apply\"@@@",
"@@@STEP_LOG_LINE@json.output@ ]@@@",
"@@@STEP_LOG_LINE@json.output@ ], @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_apply_return_code\": 1, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@STEP_LOG_LINE@patch error@Patch failed to apply@@@",
"@@@STEP_LOG_END@patch error@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@",
"@@@SET_BUILD_PROPERTY@failure_type@\"PATCH_FAILURE\"@@@"
]
},
{
"cmd": [
"python",
"-u",
"import sys; sys.exit(1)"
],
"cwd": "[SLAVE_BUILD]",
"name": "Patch failure",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_TEXT@Check the bot_update step for details@@@",
"@@@STEP_FAILURE@@@"
]
},
{
"name": "$result",
"reason": "Step('Patch failure') failed with return_code 1",
"status_code": 1
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"tryserver.chromium.linux",
"--builder",
"linux_rel",
"--slave",
"totallyaslave-c4",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--patch_url",
"http://src.chromium.org/foo/bar",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"log_lines\": [@@@",
"@@@STEP_LOG_LINE@json.output@ [@@@",
"@@@STEP_LOG_LINE@json.output@ \"patch error\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"Patch failed to apply\"@@@",
"@@@STEP_LOG_LINE@json.output@ ]@@@",
"@@@STEP_LOG_LINE@json.output@ ], @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_apply_return_code\": 3, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@STEP_LOG_LINE@patch error@Patch failed to apply@@@",
"@@@STEP_LOG_END@patch error@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"cmd": [
"python",
"-u",
"import sys;print \"Patch download failed. See bot_update step for details\";sys.exit(1)"
],
"cwd": "[SLAVE_BUILD]",
"name": "Patch failure",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_LOG_LINE@python.inline@import sys;print \"Patch download failed. See bot_update step for details\";sys.exit(1)@@@",
"@@@STEP_LOG_END@python.inline@@@",
"@@@STEP_EXCEPTION@@@"
]
},
{
"name": "$result",
"reason": "Infra Failure: Step('Patch failure') returned 1",
"status_code": 1
}
]
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[bot_update]/resources/bot_update.py",
"--master",
"tryserver.chromium.linux",
"--builder",
"linux_rel",
"--slave",
"totallyaslave-c4",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}]",
"--root",
"src/v8",
"--revision_mapping_file",
"{\"src\": \"got_cr_revision\"}",
"--patch_url",
"http://src.chromium.org/foo/bar",
"--output_json",
"/path/to/tmp/json",
"--revision",
"src@HEAD",
"--revision",
"src/v8@abc"
],
"cwd": "[SLAVE_BUILD]",
"name": "bot_update",
"~followup_annotations": [
"@@@STEP_TEXT@Some step text@@@",
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"did_run\": true, @@@",
"@@@STEP_LOG_LINE@json.output@ \"fixed_revisions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src\": \"HEAD\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"src/v8\": \"abc\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_failure\": false, @@@",
"@@@STEP_LOG_LINE@json.output@ \"patch_root\": \"src/v8\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"properties\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"got_cr_revision_cp\": \"refs/heads/master@{#170242}\"@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"root\": \"src\", @@@",
"@@@STEP_LOG_LINE@json.output@ \"step_text\": \"Some step text\"@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision@\"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@SET_BUILD_PROPERTY@got_cr_revision_cp@\"refs/heads/master@{#170242}\"@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'bot_update',
'gclient',
'recipe_engine/path',
'recipe_engine/properties',
]
def RunSteps(api):
api.gclient.use_mirror = True
src_cfg = api.gclient.make_config()
soln = src_cfg.solutions.add()
soln.name = 'src'
soln.url = 'svn://svn.chromium.org/chrome/trunk/src'
soln.revision = api.properties.get('revision')
api.gclient.c = src_cfg
api.gclient.c.revisions = api.properties.get('revisions', {})
api.gclient.c.got_revision_mapping['src'] = 'got_cr_revision'
patch = api.properties.get('patch', True)
clobber = True if api.properties.get('clobber') else False
force = True if api.properties.get('force') else False
no_shallow = True if api.properties.get('no_shallow') else False
output_manifest = api.properties.get('output_manifest', False)
with_branch_heads = api.properties.get('with_branch_heads', False)
refs = api.properties.get('refs', [])
oauth2 = api.properties.get('oauth2', False)
root_solution_revision = api.properties.get('root_solution_revision')
suffix = api.properties.get('suffix')
api.bot_update.ensure_checkout(force=force,
no_shallow=no_shallow,
patch=patch,
with_branch_heads=with_branch_heads,
output_manifest=output_manifest,
refs=refs, patch_oauth2=oauth2,
clobber=clobber,
root_solution_revision=root_solution_revision,
suffix=suffix)
def GenTests(api):
yield api.test('basic') + api.properties(
mastername='chromium.linux',
buildername='Linux Builder',
slavename='totallyaslave-m1',
patch=False,
revision='abc'
)
yield api.test('basic_with_branch_heads') + api.properties(
mastername='chromium.linux',
buildername='Linux Builder',
slavename='totallyaslave-m1',
with_branch_heads=True,
suffix='with branch heads'
)
yield api.test('basic_output_manifest') + api.properties(
mastername='chromium.linux',
buildername='Linux Builder',
slavename='totallyaslave-m1',
output_manifest=True,
)
yield api.test('tryjob') + api.properties(
mastername='tryserver.chromium.linux',
buildername='linux_rel',
slavename='totallyaslave-c4',
issue=12345,
patchset=654321,
patch_url='http://src.chromium.org/foo/bar'
)
yield api.test('trychange') + api.properties(
mastername='tryserver.chromium.linux',
buildername='linux_rel',
slavename='totallyaslave-c4',
refs=['+refs/change/1/2/333'],
)
yield api.test('trychange_oauth2') + api.properties(
mastername='tryserver.chromium.linux',
buildername='linux_rel',
slavename='totallyaslave-c4',
oauth2=True,
)
yield api.test('tryjob_fail') + api.properties(
mastername='tryserver.chromium.linux',
buildername='linux_rel',
slavename='totallyaslave-c4',
issue=12345,
patchset=654321,
patch_url='http://src.chromium.org/foo/bar',
) + api.step_data('bot_update', retcode=1)
yield api.test('tryjob_fail_patch') + api.properties(
mastername='tryserver.chromium.linux',
buildername='linux_rel',
slavename='totallyaslave-c4',
issue=12345,
patchset=654321,
patch_url='http://src.chromium.org/foo/bar',
fail_patch='apply',
) + api.step_data('bot_update', retcode=88)
yield api.test('tryjob_fail_patch_download') + api.properties(
mastername='tryserver.chromium.linux',
buildername='linux_rel',
slavename='totallyaslave-c4',
issue=12345,
patchset=654321,
patch_url='http://src.chromium.org/foo/bar',
fail_patch='download'
) + api.step_data('bot_update', retcode=87)
yield api.test('forced') + api.properties(
mastername='experimental',
buildername='Experimental Builder',
slavename='somehost',
force=1
)
yield api.test('no_shallow') + api.properties(
mastername='experimental',
buildername='Experimental Builder',
slavename='somehost',
no_shallow=1
)
yield api.test('off') + api.properties(
mastername='experimental',
buildername='Experimental Builder',
slavename='somehost',
)
yield api.test('svn_mode') + api.properties(
mastername='experimental.svn',
buildername='Experimental SVN Builder',
slavename='somehost',
force=1
)
yield api.test('clobber') + api.properties(
mastername='experimental',
buildername='Experimental Builder',
slavename='somehost',
clobber=1
)
yield api.test('reset_root_solution_revision') + api.properties(
mastername='experimental',
buildername='Experimental Builder',
slavename='somehost',
root_solution_revision='revision',
)
yield api.test('tryjob_v8') + api.properties(
mastername='tryserver.chromium.linux',
buildername='linux_rel',
slavename='totallyaslave-c4',
issue=12345,
patchset=654321,
patch_url='http://src.chromium.org/foo/bar',
patch_project='v8',
revisions={'src/v8': 'abc'}
)
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# TODO(hinoka): Use logging.
import cStringIO
import codecs
import collections
import copy
import ctypes
import json
import optparse
import os
import pprint
import random
import re
import socket
import subprocess
import sys
import tempfile
import threading
import time
import urllib2
import urlparse
import uuid
import os.path as path
# How many bytes at a time to read from pipes.
BUF_SIZE = 256
# TODO(luqui): This is a horrible hack to identify build_internal when build
# is a recipe dependency. bot_update should not be depending on internal,
# rather the arrow should go the other way (or just be destroyed).
def check_dir(name, dirs, default=None):
for d in dirs:
d = path.abspath(d)
if path.basename(d) == name and path.isdir(d):
return d
return default
# Define a bunch of directory paths.
# Relative to the current working directory.
CURRENT_DIR = path.abspath(os.getcwd())
BUILDER_DIR = path.dirname(CURRENT_DIR)
SLAVE_DIR = path.dirname(BUILDER_DIR)
# Relative to this script's filesystem path.
THIS_DIR = path.dirname(path.abspath(__file__))
SCRIPTS_DIR = check_dir(
'scripts', [
path.dirname(THIS_DIR),
path.join(SLAVE_DIR, '..', 'scripts'),
], default=path.dirname(THIS_DIR))
BUILD_DIR = path.dirname(SCRIPTS_DIR)
ROOT_DIR = path.dirname(BUILD_DIR)
DEPOT_TOOLS_DIR = path.abspath(path.join(THIS_DIR, '..', '..', '..'))
BUILD_INTERNAL_DIR = check_dir(
'build_internal', [
path.join(ROOT_DIR, 'build_internal'),
path.join(ROOT_DIR, # .recipe_deps
path.pardir, # slave
path.pardir, # scripts
path.pardir), # build_internal
])
CHROMIUM_GIT_HOST = 'https://chromium.googlesource.com'
CHROMIUM_SRC_URL = CHROMIUM_GIT_HOST + '/chromium/src.git'
# Official builds use buildspecs, so this is a special case.
BUILDSPEC_TYPE = collections.namedtuple('buildspec',
('container', 'version'))
BUILDSPEC_RE = (r'^/chrome-internal/trunk/tools/buildspec/'
'(build|branches|releases)/(.+)$')
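# For example (path is illustrative): an official-build solution URL whose
# path is '/chrome-internal/trunk/tools/buildspec/releases/45.0.2454.1'
# matches BUILDSPEC_RE with container='releases' and version='45.0.2454.1'.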
GIT_BUILDSPEC_PATH = ('https://chrome-internal.googlesource.com/chrome/tools/'
'buildspec')
BRANCH_HEADS_REFSPEC = '+refs/branch-heads/*'
BUILDSPEC_COMMIT_RE = (
re.compile(r'Buildspec for.*version (\d+\.\d+\.\d+\.\d+)'),
re.compile(r'Create (\d+\.\d+\.\d+\.\d+) buildspec'),
re.compile(r'Auto-converted (\d+\.\d+\.\d+\.\d+) buildspec to git'),
)
# Regular expression that matches a single commit footer line.
COMMIT_FOOTER_ENTRY_RE = re.compile(r'([^:]+):\s+(.+)')
# Footer metadata keys for regular and gsubtreed mirrored commit positions.
COMMIT_POSITION_FOOTER_KEY = 'Cr-Commit-Position'
COMMIT_ORIGINAL_POSITION_FOOTER_KEY = 'Cr-Original-Commit-Position'
# Regular expression to parse a commit position
COMMIT_POSITION_RE = re.compile(r'(.+)@\{#(\d+)\}')
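# e.g. 'refs/heads/master@{#170242}' parses into ('refs/heads/master',
# '170242'); compare the Cr-Commit-Position values in the expectation files
# above.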
# Regular expression to parse gclient's revinfo entries.
REVINFO_RE = re.compile(r'^([^:]+):\s+([^@]+)@(.+)$')
# Used by 'ResolveSvnRevisionFromGitiles'
GIT_SVN_PROJECT_MAP = {
'webkit': {
'svn_url': 'svn://svn.chromium.org/blink',
'branch_map': [
(r'trunk', r'refs/heads/master'),
(r'branches/([^/]+)', r'refs/branch-heads/\1'),
],
},
'v8': {
'svn_url': 'https://v8.googlecode.com/svn',
'branch_map': [
(r'trunk', r'refs/heads/candidates'),
(r'branches/bleeding_edge', r'refs/heads/master'),
(r'branches/([^/]+)', r'refs/branch-heads/\1'),
],
},
'nacl': {
'svn_url': 'svn://svn.chromium.org/native_client',
'branch_map': [
(r'trunk/src/native_client', r'refs/heads/master'),
],
},
}
# Key for the 'git-svn' ID metadata commit footer entry.
GIT_SVN_ID_FOOTER_KEY = 'git-svn-id'
# e.g., git-svn-id: https://v8.googlecode.com/svn/trunk@23117
# ce2b1a6d-e550-0410-aec6-3dcde31c8c00
GIT_SVN_ID_RE = re.compile(r'((?:\w+)://[^@]+)@(\d+)\s+(?:[a-zA-Z0-9\-]+)')
# This is the git mirror of the buildspecs repository. We could rely on the svn
# checkout, now that the git buildspecs are checked in alongside the svn
# buildspecs, but we're going to want to pull all the buildspecs from here
# eventually anyhow, and there's already some logic to pull from git (for the
# old git_buildspecs.git repo), so just stick with that.
GIT_BUILDSPEC_REPO = (
'https://chrome-internal.googlesource.com/chrome/tools/buildspec')
# Copied from scripts/recipes/chromium.py.
GOT_REVISION_MAPPINGS = {
'/chrome/trunk/src': {
'src/': 'got_revision',
'src/native_client/': 'got_nacl_revision',
'src/tools/swarm_client/': 'got_swarm_client_revision',
'src/tools/swarming_client/': 'got_swarming_client_revision',
'src/third_party/WebKit/': 'got_webkit_revision',
'src/third_party/webrtc/': 'got_webrtc_revision',
'src/v8/': 'got_v8_revision',
}
}
BOT_UPDATE_MESSAGE = """
What is the "Bot Update" step?
==============================
This step ensures that the source checkout on the bot (e.g. Chromium's src/ and
its dependencies) is checked out in a consistent state. This means that all of
the necessary repositories are checked out, no extra repositories are checked
out, and no locally modified files are present.
These actions used to be taken care of by the "gclient revert" and "update"
steps. However, those steps are known to be buggy and occasionally flaky. This
step has two main advantages over them:
* it only operates in Git, so the logic can be clearer and cleaner; and
* it is a slave-side script, so its behavior can be modified without
restarting the master.
Why Git, you ask? Because that is the direction that the Chromium project is
heading. This step is an integral part of the transition from using the SVN repo
at chrome/trunk/src to using the Git repo src.git. Please pardon the dust while
we fully convert everything to Git. This message will get out of your way
eventually, and the waterfall will be a happier place because of it.
This step can be activated or deactivated independently on every builder on
every master. When it is active, the "gclient revert" and "update" steps become
no-ops. When it is inactive, it prints this message, cleans up after itself, and
lets everything else continue as though nothing has changed. Eventually, when
everything is stable enough, this step will replace them entirely.
Debugging information:
(master/builder/slave may be unspecified on recipes)
master: %(master)s
builder: %(builder)s
slave: %(slave)s
forced by recipes: %(recipe)s
CURRENT_DIR: %(CURRENT_DIR)s
BUILDER_DIR: %(BUILDER_DIR)s
SLAVE_DIR: %(SLAVE_DIR)s
THIS_DIR: %(THIS_DIR)s
SCRIPTS_DIR: %(SCRIPTS_DIR)s
BUILD_DIR: %(BUILD_DIR)s
ROOT_DIR: %(ROOT_DIR)s
DEPOT_TOOLS_DIR: %(DEPOT_TOOLS_DIR)s
bot_update.py is:"""
ACTIVATED_MESSAGE = """ACTIVE.
The bot will perform a Git checkout in this step.
The "gclient revert" and "update" steps are no-ops.
"""
NOT_ACTIVATED_MESSAGE = """INACTIVE.
This step does nothing. You actually want to look at the "update" step.
"""
GCLIENT_TEMPLATE = """solutions = %(solutions)s
cache_dir = r%(cache_dir)s
%(target_os)s
%(target_os_only)s
"""
internal_data = {}
if BUILD_INTERNAL_DIR:
local_vars = {}
try:
execfile(os.path.join(
BUILD_INTERNAL_DIR, 'scripts', 'slave', 'bot_update_cfg.py'),
local_vars)
except Exception:
# Same as if BUILD_INTERNAL_DIR didn't exist in the first place.
print 'Warning: unable to read internal configuration file.'
print 'If this is an internal bot, this step may be erroneously inactive.'
internal_data = local_vars
RECOGNIZED_PATHS = {
# If SVN path matches key, the entire URL is rewritten to the Git url.
'/chrome/trunk/src':
CHROMIUM_SRC_URL,
'/chrome/trunk/src/tools/cros.DEPS':
CHROMIUM_GIT_HOST + '/chromium/src/tools/cros.DEPS.git',
}
RECOGNIZED_PATHS.update(internal_data.get('RECOGNIZED_PATHS', {}))
ENABLED_MASTERS = [
'bot_update.always_on',
'chromium.android',
'chromium.angle',
'chromium.chrome',
'chromium.chromedriver',
'chromium.chromiumos',
'chromium',
'chromium.fyi',
'chromium.goma',
'chromium.gpu',
'chromium.gpu.fyi',
'chromium.infra',
'chromium.infra.cron',
'chromium.linux',
'chromium.lkgr',
'chromium.mac',
'chromium.memory',
'chromium.memory.fyi',
'chromium.perf',
'chromium.perf.fyi',
'chromium.swarm',
'chromium.webkit',
'chromium.webrtc',
'chromium.webrtc.fyi',
'chromium.win',
'client.catapult',
'client.drmemory',
'client.mojo',
'client.nacl',
'client.nacl.ports',
'client.nacl.sdk',
'client.nacl.toolchain',
'client.pdfium',
'client.skia',
'client.skia.fyi',
'client.v8',
'client.v8.branches',
'client.v8.fyi',
'client.webrtc',
'client.webrtc.fyi',
'tryserver.blink',
'tryserver.client.catapult',
'tryserver.client.mojo',
'tryserver.chromium.android',
'tryserver.chromium.angle',
'tryserver.chromium.linux',
'tryserver.chromium.mac',
'tryserver.chromium.perf',
'tryserver.chromium.win',
'tryserver.infra',
'tryserver.nacl',
'tryserver.v8',
'tryserver.webrtc',
]
ENABLED_MASTERS += internal_data.get('ENABLED_MASTERS', [])
ENABLED_BUILDERS = {
'client.dart.fyi': [
'v8-linux-release',
'v8-mac-release',
'v8-win-release',
],
'client.dynamorio': [
'linux-v8-dr',
],
}
ENABLED_BUILDERS.update(internal_data.get('ENABLED_BUILDERS', {}))
ENABLED_SLAVES = {}
ENABLED_SLAVES.update(internal_data.get('ENABLED_SLAVES', {}))
# Disabled filters get run AFTER enabled filters, so for example if a builder
# config is enabled, but a bot on that builder is disabled, that bot will
# be disabled.
DISABLED_BUILDERS = {}
DISABLED_BUILDERS.update(internal_data.get('DISABLED_BUILDERS', {}))
DISABLED_SLAVES = {}
DISABLED_SLAVES.update(internal_data.get('DISABLED_SLAVES', {}))
# These masters work only in Git, meaning that for got_revision they always
# output a git hash rather than an SVN revision.
GIT_MASTERS = [
'client.v8',
'client.v8.branches',
'tryserver.v8',
]
GIT_MASTERS += internal_data.get('GIT_MASTERS', [])
# How many times to try before giving up.
ATTEMPTS = 5
# Find deps2git
DEPS2GIT_DIR_PATH = path.join(SCRIPTS_DIR, 'tools', 'deps2git')
DEPS2GIT_PATH = path.join(DEPS2GIT_DIR_PATH, 'deps2git.py')
S2G_INTERNAL_PATH = path.join(SCRIPTS_DIR, 'tools', 'deps2git_internal',
'svn_to_git_internal.py')
# ../../cache_dir aka /b/build/slave/cache_dir
GIT_CACHE_PATH = path.join(DEPOT_TOOLS_DIR, 'git_cache.py')
CACHE_DIR = path.join(SLAVE_DIR, 'cache_dir')
# Because we print CACHE_DIR out into a .gclient file, and then later run
# eval() on it, backslashes need to be escaped, otherwise "E:\b\build" gets
# parsed as "E:[\x08][\x08]uild".
if sys.platform.startswith('win'):
CACHE_DIR = CACHE_DIR.replace('\\', '\\\\')
# Find the patch tool.
if sys.platform.startswith('win'):
if not BUILD_INTERNAL_DIR:
print 'Warning: could not find patch tool because there is no '
print 'build_internal present.'
PATCH_TOOL = None
else:
PATCH_TOOL = path.join(BUILD_INTERNAL_DIR, 'tools', 'patch.EXE')
else:
PATCH_TOOL = '/usr/bin/patch'
# If there is less than 100GB of disk space on the system, then we do
# a shallow checkout.
SHALLOW_CLONE_THRESHOLD = 100 * 1024 * 1024 * 1024
class SubprocessFailed(Exception):
def __init__(self, message, code, output):
Exception.__init__(self, message)
self.code = code
self.output = output
class PatchFailed(SubprocessFailed):
pass
class GclientSyncFailed(SubprocessFailed):
pass
class SVNRevisionNotFound(Exception):
pass
class InvalidDiff(Exception):
pass
class Inactive(Exception):
"""Not really an exception, just used to exit early cleanly."""
pass
RETRY = object()
OK = object()
FAIL = object()
class PsPrinter(object):
def __init__(self, interval=300):
self.interval = interval
self.active = sys.platform.startswith('linux2')
self.thread = None
@staticmethod
def print_pstree():
"""Debugging function used to print "ps auxwwf" for stuck processes."""
subprocess.call(['ps', 'auxwwf'])
def poke(self):
if self.active:
self.cancel()
self.thread = threading.Timer(self.interval, self.print_pstree)
self.thread.start()
def cancel(self):
if self.active and self.thread is not None:
self.thread.cancel()
self.thread = None
def call(*args, **kwargs): # pragma: no cover
"""Interactive subprocess call."""
kwargs['stdout'] = subprocess.PIPE
kwargs['stderr'] = subprocess.STDOUT
kwargs.setdefault('bufsize', BUF_SIZE)
cwd = kwargs.get('cwd', os.getcwd())
result_fn = kwargs.pop('result_fn', lambda code, out: RETRY if code else OK)
stdin_data = kwargs.pop('stdin_data', None)
tries = kwargs.pop('tries', ATTEMPTS)
if stdin_data:
kwargs['stdin'] = subprocess.PIPE
out = cStringIO.StringIO()
new_env = kwargs.get('env', {})
env = copy.copy(os.environ)
env.update(new_env)
kwargs['env'] = env
attempt = 0
for attempt in range(1, tries + 1):
attempt_msg = ' (attempt #%d)' % attempt if attempt else ''
if new_env:
print '===Injecting Environment Variables==='
for k, v in sorted(new_env.items()):
print '%s: %s' % (k, v)
print '===Running %s%s===' % (' '.join(args), attempt_msg)
print 'In directory: %s' % cwd
start_time = time.time()
proc = subprocess.Popen(args, **kwargs)
if stdin_data:
proc.stdin.write(stdin_data)
proc.stdin.close()
psprinter = PsPrinter()
# This is here because passing 'sys.stdout' into stdout for proc will
# produce out of order output.
hanging_cr = False
while True:
psprinter.poke()
buf = proc.stdout.read(BUF_SIZE)
if not buf:
break
if hanging_cr:
buf = '\r' + buf
hanging_cr = buf.endswith('\r')
if hanging_cr:
buf = buf[:-1]
buf = buf.replace('\r\n', '\n').replace('\r', '\n')
sys.stdout.write(buf)
out.write(buf)
if hanging_cr:
sys.stdout.write('\n')
out.write('\n')
psprinter.cancel()
code = proc.wait()
elapsed_time = ((time.time() - start_time) / 60.0)
outval = out.getvalue()
result = result_fn(code, outval)
if result in (FAIL, RETRY):
print '===Failed in %.1f mins===' % elapsed_time
print
else:
print '===Succeeded in %.1f mins===' % elapsed_time
print
return outval
if result is FAIL:
break
if result is RETRY and attempt < tries:
sleep_backoff = 4 ** attempt
sleep_time = random.randint(sleep_backoff, int(sleep_backoff * 1.2))
print '===backing off, sleeping for %d secs===' % sleep_time
time.sleep(sleep_time)
raise SubprocessFailed('%s failed with code %d in %s after %d attempts.' %
(' '.join(args), code, cwd, attempt),
code, outval)
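# Sketch of how call() is typically used (the commands shown are illustrative):
#   out = call('git', '--version')           # retried up to ATTEMPTS times on failure
#   out = call('gclient', 'sync', tries=1)   # single attempt
# The result_fn hook decides what a given exit code/output means: OK stops and
# returns the output, RETRY re-runs the command with exponential backoff (up to
# `tries` attempts), and FAIL gives up immediately and raises SubprocessFailed.
# By default, any non-zero exit code means RETRY.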
def git(*args, **kwargs): # pragma: no cover
"""Wrapper around call specifically for Git commands."""
if args and args[0] == 'cache':
# Rewrite "git cache" calls into "python git_cache.py".
cmd = (sys.executable, '-u', GIT_CACHE_PATH) + args[1:]
else:
git_executable = 'git'
# On Windows, subprocess doesn't fuzzy-match 'git' to 'git.bat', so we
# have to do it explicitly. This is better than passing shell=True.
if sys.platform.startswith('win'):
git_executable += '.bat'
cmd = (git_executable,) + args
return call(*cmd, **kwargs)
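# For example (illustrative): git('fetch', 'origin') runs "git fetch origin"
# ("git.bat fetch origin" on Windows), while git('cache', ...) is rewritten to
# "python -u <GIT_CACHE_PATH> ..." so cache operations go through git_cache.py.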
def get_gclient_spec(solutions, target_os, target_os_only):
return GCLIENT_TEMPLATE % {
'solutions': pprint.pformat(solutions, indent=4),
'cache_dir': '"%s"' % CACHE_DIR,
'target_os': ('\ntarget_os=%s' % target_os) if target_os else '',
'target_os_only': '\ntarget_os_only=%s' % target_os_only
}
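# Illustrative sketch of the text this produces for a single solution (URL and
# cache path are made up for this example; pprint formatting is abbreviated):
#   solutions = [{'name': 'src', 'url': 'https://host.example/src.git'}]
#   cache_dir = r"/b/build/slave/cache_dir"
#   target_os_only=False
# This string is later written to a .gclient file and eval()'d by gclient,
# which is why CACHE_DIR is emitted as a raw string literal (see the
# backslash-escaping note above CACHE_DIR).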
def check_enabled(master, builder, slave):
if master in ENABLED_MASTERS:
return True
builder_list = ENABLED_BUILDERS.get(master)
if builder_list and builder in builder_list:
return True
slave_list = ENABLED_SLAVES.get(master)
if slave_list and slave in slave_list:
return True
return False
def check_disabled(master, builder, slave):
"""Returns True if disabled, False if not disabled."""
builder_list = DISABLED_BUILDERS.get(master)
if builder_list and builder in builder_list:
return True
slave_list = DISABLED_SLAVES.get(master)
if slave_list and slave in slave_list:
return True
return False
def check_valid_host(master, builder, slave):
return (check_enabled(master, builder, slave)
and not check_disabled(master, builder, slave))
def maybe_ignore_revision(revision, buildspec):
"""Handle builders that don't care what buildbot tells them to build.
This is especially the case with branch builders that build from buildspecs
and/or trigger off multiple repositories, where the --revision passed in has
nothing to do with the solution being built. Clearing the revision in this
case causes bot_update to use HEAD rather than trying to check out an
inappropriate version of the solution.
"""
if buildspec and buildspec.container == 'branches':
return []
return revision
def solutions_printer(solutions):
"""Prints gclient solution to stdout."""
print 'Gclient Solutions'
print '================='
for solution in solutions:
name = solution.get('name')
url = solution.get('url')
print '%s (%s)' % (name, url)
if solution.get('deps_file'):
print ' Dependencies file is %s' % solution['deps_file']
if 'managed' in solution:
print ' Managed mode is %s' % ('ON' if solution['managed'] else 'OFF')
custom_vars = solution.get('custom_vars')
if custom_vars:
print ' Custom Variables:'
for var_name, var_value in sorted(custom_vars.iteritems()):
print ' %s = %s' % (var_name, var_value)
custom_deps = solution.get('custom_deps')
if custom_deps:
print ' Custom Dependencies:'
for deps_name, deps_value in sorted(custom_deps.iteritems()):
if deps_value:
print ' %s -> %s' % (deps_name, deps_value)
else:
print ' %s: Ignore' % deps_name
for k, v in solution.iteritems():
# Print out all the keys we don't know about.
if k in ['name', 'url', 'deps_file', 'custom_vars', 'custom_deps',
'managed']:
continue
print ' %s is %s' % (k, v)
print
def solutions_to_git(input_solutions):
"""Modifies urls in solutions to point at Git repos.
Returns: (git solutions, svn root of first solution, buildspec) tuple.
"""
assert input_solutions
solutions = copy.deepcopy(input_solutions)
first_solution = True
buildspec = None
for solution in solutions:
original_url = solution['url']
parsed_url = urlparse.urlparse(original_url)
parsed_path = parsed_url.path
# Rewrite SVN urls into Git urls.
buildspec_m = re.match(BUILDSPEC_RE, parsed_path)
if first_solution and buildspec_m:
solution['url'] = GIT_BUILDSPEC_PATH
buildspec = BUILDSPEC_TYPE(
container=buildspec_m.group(1),
version=buildspec_m.group(2),
)
solution['deps_file'] = path.join(buildspec.container, buildspec.version,
'DEPS')
elif parsed_path in RECOGNIZED_PATHS:
solution['url'] = RECOGNIZED_PATHS[parsed_path]
solution['deps_file'] = '.DEPS.git'
elif parsed_url.scheme == 'https' and 'googlesource' in parsed_url.netloc:
pass
else:
print 'Warning: %s' % ('path %r not recognized' % parsed_path,)
# Strip out deps containing $$V8_REV$$, etc.
if 'custom_deps' in solution:
new_custom_deps = {}
for deps_name, deps_value in solution['custom_deps'].iteritems():
if deps_value and '$$' in deps_value:
print 'Dropping %s:%s from custom deps' % (deps_name, deps_value)
else:
new_custom_deps[deps_name] = deps_value
solution['custom_deps'] = new_custom_deps
if first_solution:
root = parsed_path
first_solution = False
solution['managed'] = False
# We don't want gclient to be using a safesync URL. Instead it should be
# using the lkgr/lkcr branch/tags.
if 'safesync_url' in solution:
print 'Removing safesync url %s from %s' % (solution['safesync_url'],
parsed_path)
del solution['safesync_url']
return solutions, root, buildspec
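# Illustrative sketch of what solutions_to_git() does (the concrete URL
# mapping is an assumption; the real table is RECOGNIZED_PATHS): a solution
# such as {'name': 'src', 'url': 'svn://svn.chromium.org/chrome/trunk/src'}
# whose path is in RECOGNIZED_PATHS comes back with the matching
# googlesource.com Git URL, 'deps_file' set to '.DEPS.git' and
# 'managed': False, while the returned root is the parsed SVN path of the
# first solution (here '/chrome/trunk/src').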
def remove(target):
"""Remove a target by moving it into build.dead."""
dead_folder = path.join(BUILDER_DIR, 'build.dead')
if not path.exists(dead_folder):
os.makedirs(dead_folder)
os.rename(target, path.join(dead_folder, uuid.uuid4().hex))
def ensure_no_checkout(dir_names, scm_dirname):
"""Ensure that there is no undesired checkout under build/.
If there is an incorrect checkout under build/, then
move build/ to build.dead/.
This function will check each directory in dir_names.
scm_dirname is expected to be '.svn', '.git', or '*'.
"""
assert scm_dirname in ['.svn', '.git', '*']
has_checkout = any(path.exists(path.join(os.getcwd(), dir_name, scm_dirname))
for dir_name in dir_names)
if has_checkout or scm_dirname == '*':
build_dir = os.getcwd()
prefix = ''
if scm_dirname != '*':
prefix = '%s detected in checkout, ' % scm_dirname
for filename in os.listdir(build_dir):
deletion_target = path.join(build_dir, filename)
print '%sdeleting %s...' % (prefix, deletion_target),
remove(deletion_target)
print 'done'
def gclient_configure(solutions, target_os, target_os_only):
"""Should do the same thing as gclient --spec='...'."""
with codecs.open('.gclient', mode='w', encoding='utf-8') as f:
f.write(get_gclient_spec(solutions, target_os, target_os_only))
def gclient_sync(with_branch_heads, shallow):
# We just need to allocate a filename.
fd, gclient_output_file = tempfile.mkstemp(suffix='.json')
os.close(fd)
gclient_bin = 'gclient.bat' if sys.platform.startswith('win') else 'gclient'
cmd = [gclient_bin, 'sync', '--verbose', '--reset', '--force',
'--ignore_locks', '--output-json', gclient_output_file,
'--nohooks', '--noprehooks', '--delete_unversioned_trees']
if with_branch_heads:
cmd += ['--with_branch_heads']
if shallow:
cmd += ['--shallow']
try:
call(*cmd, tries=1)
except SubprocessFailed as e:
# Throw a GclientSyncFailed exception so we can catch this independently.
raise GclientSyncFailed(e.message, e.code, e.output)
else:
with open(gclient_output_file) as f:
return json.load(f)
finally:
os.remove(gclient_output_file)
def gclient_runhooks(gyp_envs):
gclient_bin = 'gclient.bat' if sys.platform.startswith('win') else 'gclient'
env = dict([env_var.split('=', 1) for env_var in gyp_envs])
call(gclient_bin, 'runhooks', env=env)
def gclient_revinfo():
gclient_bin = 'gclient.bat' if sys.platform.startswith('win') else 'gclient'
return call(gclient_bin, 'revinfo', '-a') or ''
def create_manifest():
manifest = {}
output = gclient_revinfo()
for line in output.strip().splitlines():
match = REVINFO_RE.match(line.strip())
if match:
manifest[match.group(1)] = {
'repository': match.group(2),
'revision': match.group(3),
}
else:
print "WARNING: Couldn't match revinfo line:\n%s" % line
return manifest
def get_commit_message_footer_map(message):
"""Returns: (dict) A dictionary of commit message footer entries.
"""
footers = {}
# Extract the lines in the footer block.
lines = []
for line in message.strip().splitlines():
line = line.strip()
if len(line) == 0:
del lines[:]
continue
lines.append(line)
# Parse the footer
for line in lines:
m = COMMIT_FOOTER_ENTRY_RE.match(line)
if not m:
# If any single line isn't valid, the entire footer is invalid.
footers.clear()
return footers
footers[m.group(1)] = m.group(2).strip()
return footers
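# Illustrative example for get_commit_message_footer_map() (the footer key is
# just an example): a commit message whose final paragraph is the single line
#   Cr-Commit-Position: refs/heads/master@{#12345}
# yields {'Cr-Commit-Position': 'refs/heads/master@{#12345}'}, assuming the
# line matches COMMIT_FOOTER_ENTRY_RE; any non-matching line in that final
# paragraph empties the whole result.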
def get_commit_message_footer(message, key):
"""Returns: (str/None) The footer value for 'key', or None if none was found.
"""
return get_commit_message_footer_map(message).get(key)
def get_svn_rev(git_hash, dir_name):
log = git('log', '-1', git_hash, cwd=dir_name)
git_svn_id = get_commit_message_footer(log, GIT_SVN_ID_FOOTER_KEY)
if not git_svn_id:
return None
m = GIT_SVN_ID_RE.match(git_svn_id)
if not m:
return None
return int(m.group(2))
def get_git_hash(revision, branch, sln_dir):
"""We want to search for the SVN revision on the git-svn branch.
Note that git will search backwards from origin/master.
"""
match = "^%s: [^ ]*@%s " % (GIT_SVN_ID_FOOTER_KEY, revision)
ref = branch if branch.startswith('refs/') else 'origin/%s' % branch
cmd = ['log', '-E', '--grep', match, '--format=%H', '--max-count=1', ref]
result = git(*cmd, cwd=sln_dir).strip()
if result:
return result
raise SVNRevisionNotFound('We can\'t resolve svn r%s into a git hash in %s' %
(revision, sln_dir))
def _last_commit_for_file(filename, repo_base):
cmd = ['log', '--format=%H', '--max-count=1', '--', filename]
return git(*cmd, cwd=repo_base).strip()
def need_to_run_deps2git(repo_base, deps_file, deps_git_file):
"""Checks to see if we need to run deps2git.
Returns True if there was a DEPS change after the last .DEPS.git update
or if DEPS has local modifications.
"""
# See if DEPS is dirty
deps_file_status = git(
'status', '--porcelain', deps_file, cwd=repo_base).strip()
if deps_file_status and deps_file_status.startswith('M '):
return True
last_known_deps_ref = _last_commit_for_file(deps_file, repo_base)
last_known_deps_git_ref = _last_commit_for_file(deps_git_file, repo_base)
merge_base_ref = git('merge-base', last_known_deps_ref,
last_known_deps_git_ref, cwd=repo_base).strip()
# If the merge base of the last DEPS and last .DEPS.git file is not
# equivalent to the hash of the last DEPS file, that means the DEPS file
# was committed after the last .DEPS.git file.
return last_known_deps_ref != merge_base_ref
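# Worked example of the merge-base check above (commit names are
# hypothetical): if DEPS was last touched in commit A and .DEPS.git in
# commit B, then merge-base(A, B) == A means A is an ancestor of B, so
# .DEPS.git was regenerated at or after the DEPS change and deps2git can be
# skipped. merge-base(A, B) != A means DEPS changed after the last .DEPS.git
# update, so deps2git needs to run.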
def ensure_deps2git(solution, shallow):
repo_base = path.join(os.getcwd(), solution['name'])
deps_file = path.join(repo_base, 'DEPS')
deps_git_file = path.join(repo_base, '.DEPS.git')
if (not git('ls-files', 'DEPS', cwd=repo_base).strip() or
not git('ls-files', '.DEPS.git', cwd=repo_base).strip()):
return
print 'Checking if %s is newer than %s' % (deps_file, deps_git_file)
if not need_to_run_deps2git(repo_base, deps_file, deps_git_file):
return
print '===DEPS file modified, need to run deps2git==='
cmd = [sys.executable, DEPS2GIT_PATH,
'--workspace', os.getcwd(),
'--cache_dir', CACHE_DIR,
'--deps', deps_file,
'--out', deps_git_file]
if 'chrome-internal.googlesource' in solution['url']:
cmd.extend(['--extra-rules', S2G_INTERNAL_PATH])
if shallow:
cmd.append('--shallow')
call(*cmd)
def emit_log_lines(name, lines):
for line in lines.splitlines():
print '@@@STEP_LOG_LINE@%s@%s@@@' % (name, line)
print '@@@STEP_LOG_END@%s@@@' % name
def emit_properties(properties):
for property_name, property_value in sorted(properties.items()):
print '@@@SET_BUILD_PROPERTY@%s@"%s"@@@' % (property_name, property_value)
# Derived from:
# http://code.activestate.com/recipes/577972-disk-usage/?in=user-4178764
def get_total_disk_space():
cwd = os.getcwd()
# Windows is the only platform that doesn't support os.statvfs, so
# we need to special case this.
if sys.platform.startswith('win'):
_, total, free = (ctypes.c_ulonglong(), ctypes.c_ulonglong(),
ctypes.c_ulonglong())
if sys.version_info >= (3,) or isinstance(cwd, unicode):
fn = ctypes.windll.kernel32.GetDiskFreeSpaceExW
else:
fn = ctypes.windll.kernel32.GetDiskFreeSpaceExA
ret = fn(cwd, ctypes.byref(_), ctypes.byref(total), ctypes.byref(free))
if ret == 0:
# WinError() will fetch the last error code.
raise ctypes.WinError()
return (total.value, free.value)
else:
st = os.statvfs(cwd)
free = st.f_bavail * st.f_frsize
total = st.f_blocks * st.f_frsize
return (total, free)
def get_target_revision(folder_name, git_url, revisions):
normalized_name = folder_name.strip('/')
if normalized_name in revisions:
return revisions[normalized_name]
if git_url in revisions:
return revisions[git_url]
return None
def force_revision(folder_name, revision):
split_revision = revision.split(':', 1)
branch = 'master'
if len(split_revision) == 2:
# Support for "branch:revision" syntax.
branch, revision = split_revision
if revision and revision.upper() != 'HEAD':
if revision and revision.isdigit() and len(revision) < 40:
# rev_num is really an SVN revision number; convert it into a git hash.
git_ref = get_git_hash(int(revision), branch, folder_name)
else:
# rev_num is actually a git hash or ref, so we can just use it.
git_ref = revision
git('checkout', '--force', git_ref, cwd=folder_name)
else:
ref = branch if branch.startswith('refs/') else 'origin/%s' % branch
git('checkout', '--force', ref, cwd=folder_name)
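# Illustrative inputs for force_revision() (values are hypothetical):
#   force_revision('src', '12345')     -> all digits and shorter than 40
#       chars, so it is treated as an SVN revision and resolved to a git
#       hash via get_git_hash() before 'git checkout --force'.
#   force_revision('src', <40-char git hash>) -> checked out directly.
#   force_revision('src', 'refs/branch-heads/2272:HEAD') -> the part before
#       the colon is the branch, so 'refs/branch-heads/2272' is checked out.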
def git_checkout(solutions, revisions, shallow, refs):
build_dir = os.getcwd()
# Before we do anything, break all git_cache locks.
if path.isdir(CACHE_DIR):
git('cache', 'unlock', '-vv', '--force', '--all', '--cache-dir', CACHE_DIR)
for item in os.listdir(CACHE_DIR):
filename = os.path.join(CACHE_DIR, item)
if item.endswith('.lock'):
raise Exception('%s exists after cache unlock' % filename)
first_solution = True
for sln in solutions:
# This is so we can loop back and try again if we need to wait for the
# git mirrors to update from SVN.
done = False
tries_left = 60
while not done:
name = sln['name']
url = sln['url']
if url == CHROMIUM_SRC_URL or url + '.git' == CHROMIUM_SRC_URL:
# Experiments show there's little to be gained from
# a shallow clone of src.
shallow = False
sln_dir = path.join(build_dir, name)
s = ['--shallow'] if shallow else []
populate_cmd = (['cache', 'populate', '--ignore_locks', '-v',
'--cache-dir', CACHE_DIR] + s + [url])
for ref in refs:
populate_cmd.extend(['--ref', ref])
git(*populate_cmd)
mirror_dir = git(
'cache', 'exists', '--quiet', '--cache-dir', CACHE_DIR, url).strip()
clone_cmd = (
'clone', '--no-checkout', '--local', '--shared', mirror_dir, sln_dir)
try:
if not path.isdir(sln_dir):
git(*clone_cmd)
else:
git('remote', 'set-url', 'origin', mirror_dir, cwd=sln_dir)
git('fetch', 'origin', cwd=sln_dir)
for ref in refs:
refspec = '%s:%s' % (ref, ref.lstrip('+'))
git('fetch', 'origin', refspec, cwd=sln_dir)
revision = get_target_revision(name, url, revisions) or 'HEAD'
force_revision(sln_dir, revision)
done = True
except SubprocessFailed as e:
# Exited abnormally; there's probably something wrong.
# Let's wipe the checkout and try again.
tries_left -= 1
if tries_left > 0:
print 'Something failed: %s.' % str(e)
print 'waiting 5 seconds and trying again...'
time.sleep(5)
else:
raise
remove(sln_dir)
except SVNRevisionNotFound:
tries_left -= 1
if tries_left > 0:
# If we don't have the correct revision, wait and try again.
print 'We can\'t find revision %s.' % revision
print 'The svn to git replicator is probably falling behind.'
print 'waiting 5 seconds and trying again...'
time.sleep(5)
else:
raise
git('clean', '-dff', cwd=sln_dir)
if first_solution:
git_ref = git('log', '--format=%H', '--max-count=1',
cwd=sln_dir).strip()
first_solution = False
return git_ref
def _download(url):
"""Fetch url and return content, with retries for flake."""
for attempt in xrange(ATTEMPTS):
try:
return urllib2.urlopen(url).read()
except Exception:
if attempt == ATTEMPTS - 1:
raise
def parse_diff(diff):
"""Takes a unified diff and returns a list of diffed files and their diffs.
The return format is a list of pairs of:
(<filename>, <diff contents>)
<diff contents> is inclusive of the diff line.
"""
result = []
current_diff = ''
current_header = None
for line in diff.splitlines():
# "diff" is for git style patches, and "Index: " is for SVN style patches.
if line.startswith('diff') or line.startswith('Index: '):
if current_header:
# If we are in a diff portion, then save the diff.
result.append((current_header, '%s\n' % current_diff))
git_header_match = re.match(r'diff (?:--git )?(\S+) (\S+)', line)
svn_header_match = re.match(r'Index: (.*)', line)
if git_header_match:
# First, see if it's a git-style header.
from_file = git_header_match.group(1)
to_file = git_header_match.group(2)
if from_file != to_file and from_file.startswith('a/'):
# Sometimes git prepends 'a/' and 'b/' in front of file paths.
from_file = from_file[2:]
current_header = from_file
elif svn_header_match:
# Otherwise, check if it's an SVN-style header.
current_header = svn_header_match.group(1)
else:
# Otherwise the header is unrecognized; give up and report the line.
raise InvalidDiff('Can\'t process header: %s\nFull diff:\n%s' %
(line, diff))
current_diff = ''
current_diff += '%s\n' % line
if current_header:
# We hit EOF, so save the last diff.
result.append((current_header, current_diff))
return result
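# Illustrative input/output for parse_diff() (file names are hypothetical):
# a unified diff containing the headers
#   diff --git a/foo.cc b/foo.cc
#   Index: bar/baz.py
# (each followed by its hunk lines) is split into
#   [('foo.cc', <diff text for foo.cc>), ('bar/baz.py', <diff text for baz>)]
# where each diff text still begins with its own 'diff'/'Index:' header line.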
def get_svn_patch(patch_url):
"""Fetch patch from patch_url, return list of (filename, diff)"""
svn_exe = 'svn.bat' if sys.platform.startswith('win') else 'svn'
patch_data = call(svn_exe, 'cat', patch_url)
return parse_diff(patch_data)
def apply_svn_patch(patch_root, patches, whitelist=None, blacklist=None):
"""Expects a list of (filename, diff), applies it on top of patch_root."""
if whitelist:
patches = [(name, diff) for name, diff in patches if name in whitelist]
elif blacklist:
patches = [(name, diff) for name, diff in patches if name not in blacklist]
diffs = [diff for _, diff in patches]
patch = ''.join(diffs)
if patch:
print '===Patching files==='
for filename, _ in patches:
print 'Patching %s' % filename
try:
call(PATCH_TOOL, '-p0', '--remove-empty-files', '--force', '--forward',
stdin_data=patch, cwd=patch_root, tries=1)
for filename, _ in patches:
full_filename = path.abspath(path.join(patch_root, filename))
git('add', full_filename, cwd=path.dirname(full_filename))
except SubprocessFailed as e:
raise PatchFailed(e.message, e.code, e.output)
def apply_rietveld_issue(issue, patchset, root, server, _rev_map, _revision,
email_file, key_file, whitelist=None, blacklist=None):
apply_issue_bin = ('apply_issue.bat' if sys.platform.startswith('win')
else 'apply_issue')
cmd = [apply_issue_bin,
# The patch will be applied on top of this directory.
'--root_dir', root,
# Tell apply_issue how to fetch the patch.
'--issue', issue,
'--server', server,
# Always run apply_issue.py, otherwise it would see update.flag
# and then bail out.
'--force',
# Don't run gclient sync when it sees a DEPS change.
'--ignore_deps',
# TODO(tandrii): remove after http://crbug.com/537417 is resolved.
# Temporarily enable verbosity to see if Rietveld requests are actually
# retried.
'-v', '-v', # = logging.DEBUG level.
]
# Use an oauth key file if specified.
if email_file and key_file:
cmd.extend(['--email-file', email_file, '--private-key-file', key_file])
else:
cmd.append('--no-auth')
if patchset:
cmd.extend(['--patchset', patchset])
if whitelist:
for item in whitelist:
cmd.extend(['--whitelist', item])
elif blacklist:
for item in blacklist:
cmd.extend(['--blacklist', item])
# Only try once, since subsequent failures hide the real failure.
try:
call(*cmd, tries=1)
except SubprocessFailed as e:
raise PatchFailed(e.message, e.code, e.output)
def apply_gerrit_ref(gerrit_repo, gerrit_ref, root):
gerrit_repo = gerrit_repo or 'origin'
assert gerrit_ref
try:
base_rev = git('rev-parse', 'HEAD', cwd=root).strip()
git('retry', 'fetch', gerrit_repo, gerrit_ref, cwd=root, tries=1)
git('checkout', 'FETCH_HEAD', cwd=root)
git('reset', '--soft', base_rev, cwd=root)
except SubprocessFailed as e:
raise PatchFailed(e.message, e.code, e.output)
def check_flag(flag_file):
"""Returns True if the flag file is present."""
return os.path.isfile(flag_file)
def delete_flag(flag_file):
"""Remove bot update flag."""
if os.path.isfile(flag_file):
os.remove(flag_file)
def emit_flag(flag_file):
"""Deposit a bot update flag on the system to tell gclient not to run."""
print 'Emitting flag file at %s' % flag_file
with open(flag_file, 'wb') as f:
f.write('Success!')
def get_commit_position_for_git_svn(url, revision):
"""Generates a commit position string for a 'git-svn' URL/revision.
If the 'git-svn' URL maps to a known project, we will construct a commit
position branch value by applying substitution on the SVN URL.
"""
# Identify the base URL so we can strip off trunk/branch name
project_config = branch = None
for _, project_config in GIT_SVN_PROJECT_MAP.iteritems():
if url.startswith(project_config['svn_url']):
branch = url[len(project_config['svn_url']):]
break
if branch:
# Strip any leading slashes
branch = branch.lstrip('/')
# Try and map the branch
for pattern, repl in project_config.get('branch_map', ()):
nbranch, subn = re.subn(pattern, repl, branch, count=1)
if subn:
print 'INFO: Mapped SVN branch to Git branch [%s] => [%s]' % (
branch, nbranch)
branch = nbranch
break
else:
# Use generic 'svn' branch
print 'INFO: Could not resolve project for SVN URL %r' % (url,)
branch = 'svn'
return '%s@{#%s}' % (branch, revision)
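# Illustrative example for get_commit_position_for_git_svn() (the URL and the
# branch_map rule are hypothetical; the real data lives in
# GIT_SVN_PROJECT_MAP): an SVN URL like svn://svn.chromium.org/chrome/trunk/src
# at revision 291560, with a branch_map entry rewriting the trunk path to
# 'refs/heads/master', produces 'refs/heads/master@{#291560}'. A URL matching
# no known project falls back to the generic branch, i.e. 'svn@{#291560}'.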
def get_commit_position(git_path, revision='HEAD'):
"""Dumps the 'git' log for a specific revision and parses out the commit
position.
If a commit position metadata key is found, its value will be returned.
Otherwise, we will search for a 'git-svn' metadata entry. If one is found,
we will compose a commit position from it, using its SVN revision value as
the revision.
If the 'git-svn' URL maps to a known project, we will construct a commit
position branch value by truncating the URL, mapping 'trunk' to
"refs/heads/master". Otherwise, we will return the generic branch, 'svn'.
"""
git_log = git('log', '--format=%B', '-n1', revision, cwd=git_path)
footer_map = get_commit_message_footer_map(git_log)
# Search for commit position metadata
value = (footer_map.get(COMMIT_POSITION_FOOTER_KEY) or
footer_map.get(COMMIT_ORIGINAL_POSITION_FOOTER_KEY))
if value:
return value
# Compose a commit position from 'git-svn' metadata
value = footer_map.get(GIT_SVN_ID_FOOTER_KEY)
if value:
m = GIT_SVN_ID_RE.match(value)
if not m:
raise ValueError("Invalid 'git-svn' value: [%s]" % (value,))
return get_commit_position_for_git_svn(m.group(1), m.group(2))
return None
def parse_got_revision(gclient_output, got_revision_mapping, use_svn_revs):
"""Translate git gclient revision mapping to build properties.
If use_svn_revs is True, then translate git hashes in the revision mapping
to svn revision numbers.
"""
properties = {}
solutions_output = {
# Make sure path always ends with a single slash.
'%s/' % path.rstrip('/') : solution_output for path, solution_output
in gclient_output['solutions'].iteritems()
}
for dir_name, property_name in got_revision_mapping.iteritems():
# Make sure dir_name always ends with a single slash.
dir_name = '%s/' % dir_name.rstrip('/')
if dir_name not in solutions_output:
continue
solution_output = solutions_output[dir_name]
if solution_output.get('scm') is None:
# This is an ignored DEPS, so the output got_revision should be 'None'.
git_revision = revision = commit_position = None
else:
# Since we are using .DEPS.git, everything had better be git.
assert solution_output.get('scm') == 'git'
git_revision = git('rev-parse', 'HEAD', cwd=dir_name).strip()
if use_svn_revs:
revision = get_svn_rev(git_revision, dir_name)
if not revision:
revision = git_revision
else:
revision = git_revision
commit_position = get_commit_position(dir_name)
properties[property_name] = revision
if revision != git_revision:
properties['%s_git' % property_name] = git_revision
if commit_position:
properties['%s_cp' % property_name] = commit_position
return properties
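# Illustrative result of parse_got_revision() (names are hypothetical): with
# got_revision_mapping = {'src': 'got_revision'} and a gclient output whose
# 'solutions' include 'src/', the returned properties look like
#   {'got_revision': <revision for src>, 'got_revision_cp': <commit position>}
# where 'got_revision_cp' appears only if a commit position was found, and
# 'got_revision_git' carries the raw git hash whenever an SVN revision was
# substituted as the main revision value.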
def emit_json(out_file, did_run, gclient_output=None, **kwargs):
"""Write run information into a JSON file."""
output = {}
output.update(gclient_output if gclient_output else {})
output.update({'did_run': did_run})
output.update(kwargs)
with open(out_file, 'wb') as f:
f.write(json.dumps(output, sort_keys=True))
def ensure_deps_revisions(deps_url_mapping, solutions, revisions):
"""Ensure correct DEPS revisions, ignores solutions."""
for deps_name, deps_data in sorted(deps_url_mapping.items()):
if deps_name.strip('/') in solutions:
# This has already been forced to the correct revision by git_checkout().
continue
revision = get_target_revision(deps_name, deps_data.get('url', None),
revisions)
if not revision:
continue
# TODO(hinoka): Catch SVNRevisionNotFound error maybe?
git('fetch', 'origin', cwd=deps_name)
force_revision(deps_name, revision)
def ensure_checkout(solutions, revisions, first_sln, target_os, target_os_only,
patch_root, issue, patchset, patch_url, rietveld_server,
gerrit_repo, gerrit_ref, revision_mapping,
apply_issue_email_file, apply_issue_key_file, buildspec,
gyp_env, shallow, runhooks, refs):
# Get a checkout of each solution, without DEPS or hooks.
# Calling git directly because there is no way to run Gclient without
# invoking DEPS.
print 'Fetching Git checkout'
git_ref = git_checkout(solutions, revisions, shallow, refs)
patches = None
if patch_url:
patches = get_svn_patch(patch_url)
already_patched = []
patch_root = patch_root or ''
for solution in solutions:
if (patch_root == solution['name'] or
solution['name'].startswith(patch_root + '/')):
relative_root = solution['name'][len(patch_root) + 1:]
target = '/'.join([relative_root, 'DEPS']).lstrip('/')
if patches:
apply_svn_patch(patch_root, patches, whitelist=[target])
already_patched.append(target)
elif issue:
apply_rietveld_issue(issue, patchset, patch_root, rietveld_server,
revision_mapping, git_ref, apply_issue_email_file,
apply_issue_key_file, whitelist=[target])
already_patched.append(target)
if not buildspec:
# Run deps2git if there is a DEPS change after the last .DEPS.git commit.
for solution in solutions:
ensure_deps2git(solution, shallow)
# Ensure our build/ directory is set up with the correct .gclient file.
gclient_configure(solutions, target_os, target_os_only)
# Let gclient do the DEPS syncing.
# The branch-head refspec is a special case because it's possible Chrome
# src, which contains the branch-head refspecs, is DEPSed in.
gclient_output = gclient_sync(buildspec or BRANCH_HEADS_REFSPEC in refs,
shallow)
# Now that gclient_sync has finished, we should revert any .DEPS.git so that
# presubmit doesn't complain about it being modified.
if (not buildspec and
git('ls-files', '.DEPS.git', cwd=first_sln).strip()):
git('checkout', 'HEAD', '--', '.DEPS.git', cwd=first_sln)
if buildspec and runhooks:
# Run gclient runhooks if we're on an official builder.
# TODO(hinoka): Remove this when the official builders run their own
# runhooks step.
gclient_runhooks(gyp_env)
# Finally, ensure that all DEPS are pinned to the correct revision.
dir_names = [sln['name'] for sln in solutions]
ensure_deps_revisions(gclient_output.get('solutions', {}),
dir_names, revisions)
# Apply the rest of the patch here (sans DEPS)
if patches:
apply_svn_patch(patch_root, patches, blacklist=already_patched)
elif issue:
apply_rietveld_issue(issue, patchset, patch_root, rietveld_server,
revision_mapping, git_ref, apply_issue_email_file,
apply_issue_key_file, blacklist=already_patched)
elif gerrit_ref:
apply_gerrit_ref(gerrit_repo, gerrit_ref, patch_root)
# Reset the deps_file path in the solutions so that hooks get run properly.
for sln in solutions:
sln['deps_file'] = sln.get('deps_file', 'DEPS').replace('.DEPS.git', 'DEPS')
gclient_configure(solutions, target_os, target_os_only)
return gclient_output
def parse_revisions(revisions, root):
"""Turn a list of revision specs into a nice dictionary.
We will always return a dict with {root: something}. By default, if no
revision is specified for root, or if revisions is [], root is assigned 'HEAD'.
"""
results = {root.strip('/'): 'HEAD'}
expanded_revisions = []
for revision in revisions:
# Allow rev1,rev2,rev3 format.
# TODO(hinoka): Delete this when webkit switches to recipes.
expanded_revisions.extend(revision.split(','))
for revision in expanded_revisions:
split_revision = revision.split('@')
if len(split_revision) == 1:
# This is just a plain revision, set it as the revision for root.
results[root] = split_revision[0]
elif len(split_revision) == 2:
# This is an alt_root@revision argument.
current_root, current_rev = split_revision
# We want to normalize svn/git urls into .git urls.
parsed_root = urlparse.urlparse(current_root)
if parsed_root.scheme == 'svn':
if parsed_root.path in RECOGNIZED_PATHS:
normalized_root = RECOGNIZED_PATHS[parsed_root.path]
else:
print 'WARNING: SVN path %s not recognized, ignoring' % current_root
continue
elif parsed_root.scheme in ['http', 'https']:
normalized_root = 'https://%s/%s' % (parsed_root.netloc,
parsed_root.path)
if not normalized_root.endswith('.git'):
normalized_root = '%s.git' % normalized_root
elif parsed_root.scheme:
print 'WARNING: Unrecognized scheme %s, ignoring' % parsed_root.scheme
continue
else:
# This is probably a local path.
normalized_root = current_root.strip('/')
results[normalized_root] = current_rev
else:
print ('WARNING: %r is not recognized as a valid revision specification, '
'skipping' % revision)
return results
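# Illustrative inputs for parse_revisions() (values are hypothetical); note
# that comma-separated revisions are expanded first:
#   parse_revisions([], 'src')           -> {'src': 'HEAD'}
#   parse_revisions(['deadbeef'], 'src') -> {'src': 'deadbeef'}
#   parse_revisions(['src/third_party/WebKit@198765'], 'src')
#       -> {'src': 'HEAD', 'src/third_party/WebKit': '198765'}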
def parse_args():
parse = optparse.OptionParser()
parse.add_option('--issue', help='Issue number to patch from.')
parse.add_option('--patchset',
help='Patchset from issue to patch from, if applicable.')
parse.add_option('--apply_issue_email_file',
help='--email-file option passthrough for apply_patch.py.')
parse.add_option('--apply_issue_key_file',
help='--private-key-file option passthrough for '
'apply_patch.py.')
parse.add_option('--patch_url', help='Optional URL to SVN patch.')
parse.add_option('--root', dest='patch_root',
help='DEPRECATED: Use --patch_root.')
parse.add_option('--patch_root', help='Directory to patch on top of.')
parse.add_option('--rietveld_server',
default='codereview.chromium.org',
help='Rietveld server.')
parse.add_option('--gerrit_repo',
help='Gerrit repository to pull the ref from.')
parse.add_option('--gerrit_ref', help='Gerrit ref to apply.')
parse.add_option('--specs', help='Gclient spec.')
parse.add_option('--master', help='Master name.')
parse.add_option('-f', '--force', action='store_true',
help='Bypass check to see if we want to be run. '
'Should ONLY be used locally or by smart recipes.')
parse.add_option('--revision_mapping',
help='{"path/to/repo/": "property_name"}')
parse.add_option('--revision_mapping_file',
help=('Same as revision_mapping, except it is a path to a JSON'
' file containing that format.'))
parse.add_option('--revision', action='append', default=[],
help='Revision to check out. Can be an SVN revision number, '
'git hash, or any form of git ref. Can prepend '
'root@<rev> to specify which repository, where root '
'is either a filesystem path, git https url, or '
'svn url. To specify Tip of Tree, set rev to HEAD. '
'To specify a git branch and an SVN rev, <rev> can be '
'set to <branch>:<revision>.')
parse.add_option('--output_manifest', action='store_true',
help=('Add manifest json to the json output.'))
parse.add_option('--slave_name', default=socket.getfqdn().split('.')[0],
help='Hostname of the current machine, '
'used for determining whether or not to activate.')
parse.add_option('--builder_name', help='Name of the builder, '
'used for determining whether or not to activate.')
parse.add_option('--build_dir', default=os.getcwd())
parse.add_option('--flag_file', default=path.join(os.getcwd(),
'update.flag'))
parse.add_option('--shallow', action='store_true',
help='Use shallow clones for cache repositories.')
parse.add_option('--gyp_env', action='append', default=[],
help='Environment variables to pass into gclient runhooks.')
parse.add_option('--clobber', action='store_true',
help='Delete checkout first, always')
parse.add_option('--bot_update_clobber', action='store_true', dest='clobber',
help='(synonym for --clobber)')
parse.add_option('-o', '--output_json',
help='Output JSON information into a specified file')
parse.add_option('--no_shallow', action='store_true',
help='Bypass disk detection and never shallow clone. '
'Does not override the --shallow flag')
parse.add_option('--no_runhooks', action='store_true',
help='Do not run hooks on official builder.')
parse.add_option('--refs', action='append',
help='Also fetch this refspec for the main solution(s). '
'Eg. +refs/branch-heads/*')
parse.add_option('--with_branch_heads', action='store_true',
help='Always pass --with_branch_heads to gclient. This '
'does the same thing as --refs +refs/branch-heads/*')
options, args = parse.parse_args()
if not options.refs:
options.refs = []
if options.with_branch_heads:
options.refs.append(BRANCH_HEADS_REFSPEC)
del options.with_branch_heads
try:
if options.revision_mapping_file:
if options.revision_mapping:
print ('WARNING: Ignoring --revision_mapping: --revision_mapping_file '
'was set at the same time as --revision_mapping.')
with open(options.revision_mapping_file, 'r') as f:
options.revision_mapping = json.load(f)
elif options.revision_mapping:
options.revision_mapping = json.loads(options.revision_mapping)
except Exception as e:
print (
'WARNING: Caught exception while parsing revision_mapping*: %s'
% (str(e),)
)
return options, args
def prepare(options, git_slns, active):
"""Prepares the target folder before we checkout."""
dir_names = [sln.get('name') for sln in git_slns if 'name' in sln]
# If we're active now, but the flag file doesn't exist (we weren't active
# last run) or vice versa, blow away all checkouts.
if bool(active) != bool(check_flag(options.flag_file)):
ensure_no_checkout(dir_names, '*')
if options.output_json:
# Make sure we tell recipes that we didn't run if the script exits here.
emit_json(options.output_json, did_run=active)
if active:
if options.clobber:
ensure_no_checkout(dir_names, '*')
else:
ensure_no_checkout(dir_names, '.svn')
emit_flag(options.flag_file)
else:
delete_flag(options.flag_file)
raise Inactive # This is caught in main() and we exit cleanly.
# Do a shallow checkout if the disk is less than 100GB.
total_disk_space, free_disk_space = get_total_disk_space()
total_disk_space_gb = int(total_disk_space / (1024 * 1024 * 1024))
used_disk_space_gb = int((total_disk_space - free_disk_space)
/ (1024 * 1024 * 1024))
percent_used = int(used_disk_space_gb * 100 / total_disk_space_gb)
step_text = '[%dGB/%dGB used (%d%%)]' % (used_disk_space_gb,
total_disk_space_gb,
percent_used)
if not options.output_json:
print '@@@STEP_TEXT@%s@@@' % step_text
if not options.shallow:
options.shallow = (total_disk_space < SHALLOW_CLONE_THRESHOLD
and not options.no_shallow)
# The first solution is where the primary DEPS file resides.
first_sln = dir_names[0]
# Split all the revision specifications into a nice dict.
print 'Revisions: %s' % options.revision
revisions = parse_revisions(options.revision, first_sln)
print 'Fetching Git checkout at %s@%s' % (first_sln, revisions[first_sln])
return revisions, step_text
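# Illustrative outcome of the shallow-clone heuristic in prepare() above
# (the threshold is whatever SHALLOW_CLONE_THRESHOLD is set to elsewhere in
# this file): on a bot whose total disk is below the threshold and where
# neither --shallow nor --no_shallow was passed, options.shallow ends up
# True; --no_shallow keeps it False regardless of disk size, and an explicit
# --shallow always wins.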
def checkout(options, git_slns, specs, buildspec, master,
svn_root, revisions, step_text):
first_sln = git_slns[0]['name']
dir_names = [sln.get('name') for sln in git_slns if 'name' in sln]
try:
# Outer try is for catching patch failures and exiting gracefully.
# Inner try is for catching gclient failures and retrying gracefully.
try:
checkout_parameters = dict(
# First, pass in the base of what we want to check out.
solutions=git_slns,
revisions=revisions,
first_sln=first_sln,
# Also, target os variables for gclient.
target_os=specs.get('target_os', []),
target_os_only=specs.get('target_os_only', False),
# Then, pass in information about how to patch.
patch_root=options.patch_root,
issue=options.issue,
patchset=options.patchset,
patch_url=options.patch_url,
rietveld_server=options.rietveld_server,
gerrit_repo=options.gerrit_repo,
gerrit_ref=options.gerrit_ref,
revision_mapping=options.revision_mapping,
apply_issue_email_file=options.apply_issue_email_file,
apply_issue_key_file=options.apply_issue_key_file,
# For official builders.
buildspec=buildspec,
gyp_env=options.gyp_env,
runhooks=not options.no_runhooks,
# Finally, extra configurations such as shallowness of the clone.
shallow=options.shallow,
refs=options.refs)
gclient_output = ensure_checkout(**checkout_parameters)
except GclientSyncFailed:
print 'We failed gclient sync, lets delete the checkout and retry.'
ensure_no_checkout(dir_names, '*')
gclient_output = ensure_checkout(**checkout_parameters)
except PatchFailed as e:
if options.output_json:
# Tell recipes information such as root, got_revision, etc.
emit_json(options.output_json,
did_run=True,
root=first_sln,
log_lines=[('patch error', e.output),],
patch_apply_return_code=e.code,
patch_root=options.patch_root,
patch_failure=True,
step_text='%s PATCH FAILED' % step_text,
fixed_revisions=revisions)
else:
# If we're not on recipes, tell annotator about our got_revisions.
emit_log_lines('patch error', e.output)
print '@@@STEP_TEXT@%s PATCH FAILED@@@' % step_text
raise
# Revision is an svn revision, unless it's a git master.
use_svn_rev = master not in GIT_MASTERS
# Take care of got_revisions outputs.
revision_mapping = dict(GOT_REVISION_MAPPINGS.get(svn_root, {}))
if options.revision_mapping:
revision_mapping.update(options.revision_mapping)
# If the repo is not in the default GOT_REVISION_MAPPINGS and no
# revision_mapping was specified on the command line, then
# default to setting 'got_revision' based on the first solution.
if not revision_mapping:
revision_mapping[first_sln] = 'got_revision'
got_revisions = parse_got_revision(gclient_output, revision_mapping,
use_svn_rev)
if not got_revisions:
# TODO(hinoka): We should probably bail out here, but in the interest
# of giving mis-configured bots some time to get fixed, use a dummy
# revision here.
got_revisions = { 'got_revision': 'BOT_UPDATE_NO_REV_FOUND' }
#raise Exception('No got_revision(s) found in gclient output')
if options.output_json:
manifest = create_manifest() if options.output_manifest else None
# Tell recipes information such as root, got_revision, etc.
emit_json(options.output_json,
did_run=True,
root=first_sln,
patch_root=options.patch_root,
step_text=step_text,
fixed_revisions=revisions,
properties=got_revisions,
manifest=manifest)
else:
# If we're not on recipes, tell annotator about our got_revisions.
emit_properties(got_revisions)
def print_help_text(force, output_json, active, master, builder, slave):
"""Print helpful messages to tell devs whats going on."""
if force and output_json:
recipe_force = 'Forced on by recipes'
elif active and output_json:
recipe_force = 'Off by recipes, but forced on by bot update'
elif not active and output_json:
recipe_force = 'Forced off by recipes'
else:
recipe_force = 'N/A. Was not called by recipes'
print BOT_UPDATE_MESSAGE % {
'master': master or 'Not specified',
'builder': builder or 'Not specified',
'slave': slave or 'Not specified',
'recipe': recipe_force,
'CURRENT_DIR': CURRENT_DIR,
'BUILDER_DIR': BUILDER_DIR,
'SLAVE_DIR': SLAVE_DIR,
'THIS_DIR': THIS_DIR,
'SCRIPTS_DIR': SCRIPTS_DIR,
'BUILD_DIR': BUILD_DIR,
'ROOT_DIR': ROOT_DIR,
'DEPOT_TOOLS_DIR': DEPOT_TOOLS_DIR,
},
print ACTIVATED_MESSAGE if active else NOT_ACTIVATED_MESSAGE
def main():
# Get inputs.
options, _ = parse_args()
builder = options.builder_name
slave = options.slave_name
master = options.master
# Check if this script should activate or not.
active = check_valid_host(master, builder, slave) or options.force or False
# Print a helpful message to tell developers what's going on with this step.
print_help_text(
options.force, options.output_json, active, master, builder, slave)
# Parse, manipulate, and print the gclient solutions.
specs = {}
exec(options.specs, specs)
svn_solutions = specs.get('solutions', [])
git_slns, svn_root, buildspec = solutions_to_git(svn_solutions)
options.revision = maybe_ignore_revision(options.revision, buildspec)
solutions_printer(git_slns)
try:
# Dun dun dun, the main part of bot_update.
revisions, step_text = prepare(options, git_slns, active)
checkout(options, git_slns, specs, buildspec, master, svn_root, revisions,
step_text)
except Inactive:
# Not active, should count as passing.
pass
except PatchFailed as e:
emit_flag(options.flag_file)
# Return a specific non-zero exit code for patch failure (because it is
# a failure), but make it different than other failures to distinguish
# between infra failures (independent from patch author), and patch
# failures (that patch author can fix). However, PatchFailure due to
# download patch failure is still an infra problem.
if e.code == 3:
# Patch download problem.
return 87
# Genuine patch problem.
return 88
except Exception:
# Unexpected failure.
emit_flag(options.flag_file)
raise
else:
emit_flag(options.flag_file)
if __name__ == '__main__':
sys.exit(main())
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import hashlib
import os
import struct
import sys
from recipe_engine import recipe_test_api
# TODO(phajdan.jr): Clean up this somewhat ugly import.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'resources'))
import bot_update
class BotUpdateTestApi(recipe_test_api.RecipeTestApi):
def output_json(self, master, builder, slave, root, first_sln,
revision_mapping, git_mode, force=False, fail_patch=False,
output_manifest=False, fixed_revisions=None):
"""Deterministically synthesize json.output test data for gclient's
--output-json option.
"""
active = bot_update.check_valid_host(master, builder, slave) or force
output = {
'did_run': active,
'patch_failure': False
}
# Add in extra json output if active.
if active:
properties = {
property_name: self.gen_revision(project_name, git_mode)
for project_name, property_name in revision_mapping.iteritems()
}
properties.update({
'%s_cp' % property_name: ('refs/heads/master@{#%s}' %
self.gen_revision(project_name, False))
for project_name, property_name in revision_mapping.iteritems()
})
# We also want to simulate outputting "got_revision_git": ...
# when git mode is off to match what bot_update.py does.
if not git_mode:
properties.update({
'%s_git' % property_name: self.gen_revision(project_name, True)
for project_name, property_name in revision_mapping.iteritems()
})
output.update({
'patch_root': root or first_sln,
'root': first_sln,
'properties': properties,
'step_text': 'Some step text'
})
if output_manifest:
output.update({
'manifest': {
project_name: {
'repository': 'https://fake.org/%s.git' % project_name,
'revision': self.gen_revision(project_name, git_mode),
}
for project_name in revision_mapping
}
})
if fixed_revisions:
output['fixed_revisions'] = fixed_revisions
if fail_patch:
output['log_lines'] = [('patch error', 'Patch failed to apply'),]
output['patch_failure'] = True
output['patch_apply_return_code'] = 1
if fail_patch == 'download':
output['patch_apply_return_code'] = 3
return self.m.json.output(output)
@staticmethod
def gen_revision(project, GIT_MODE):
"""Hash project to bogus deterministic revision values."""
h = hashlib.sha1(project)
if GIT_MODE:
return h.hexdigest()
else:
return struct.unpack('!I', h.digest()[:4])[0] % 300000
DEPS = [
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/step',
'tryserver',
]
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine import recipe_api
class RevisionResolver(object):
"""Resolves the revision based on build properties."""
def resolve(self, properties): # pragma: no cover
raise NotImplementedError()
class RevisionFallbackChain(RevisionResolver):
"""Specify that a given project's sync revision follows the fallback chain."""
def __init__(self, default=None):
self._default = default
def resolve(self, properties):
"""Resolve the revision via the revision fallback chain.
If the given revision was set using the revision_fallback_chain() function,
this function will follow the chain, looking at relevant build properties
until it finds one set or reaches the end of the chain and returns the
default. If the given revision was not set using revision_fallback_chain(),
this function just returns it as-is.
"""
return (properties.get('parent_got_revision') or
properties.get('orig_revision') or
properties.get('revision') or
self._default)
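# Illustrative resolution order for RevisionFallbackChain (property names are
# the ones read by resolve() above): with build properties
#   {'parent_got_revision': 'abc123', 'revision': 'HEAD'}
# resolve() returns 'abc123'; with none of the listed properties set, it
# returns the default passed to the constructor (None unless specified).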
def jsonish_to_python(spec, is_top=False):
ret = ''
if is_top: # We're the 'top' level, so treat this dict as a suite.
ret = '\n'.join(
'%s = %s' % (k, jsonish_to_python(spec[k])) for k in sorted(spec)
)
else:
if isinstance(spec, dict):
ret += '{'
ret += ', '.join(
"%s: %s" % (repr(str(k)), jsonish_to_python(spec[k]))
for k in sorted(spec)
)
ret += '}'
elif isinstance(spec, list):
ret += '['
ret += ', '.join(jsonish_to_python(x) for x in spec)
ret += ']'
elif isinstance(spec, basestring):
ret = repr(str(spec))
else:
ret = repr(spec)
return ret
class GclientApi(recipe_api.RecipeApi):
# Singleton object to indicate to checkout() that we should run a revert if
# we detect that we're on the tryserver.
RevertOnTryserver = object()
def __init__(self, **kwargs):
super(GclientApi, self).__init__(**kwargs)
self.USE_MIRROR = None
self._spec_alias = None
def __call__(self, name, cmd, infra_step=True, **kwargs):
"""Wrapper for easy calling of gclient steps."""
assert isinstance(cmd, (list, tuple))
prefix = 'gclient '
if self.spec_alias:
prefix = ('[spec: %s] ' % self.spec_alias) + prefix
return self.m.python(prefix + name,
self.m.path['depot_tools'].join('gclient.py'),
cmd,
infra_step=infra_step,
**kwargs)
@property
def use_mirror(self):
"""Indicates if gclient will use mirrors in its configuration."""
if self.USE_MIRROR is None:
self.USE_MIRROR = self.m.properties.get('use_mirror', True)
return self.USE_MIRROR
@use_mirror.setter
def use_mirror(self, val): # pragma: no cover
self.USE_MIRROR = val
@property
def spec_alias(self):
"""Optional name for the current spec for step naming."""
return self._spec_alias
@spec_alias.setter
def spec_alias(self, name):
self._spec_alias = name
@spec_alias.deleter
def spec_alias(self):
self._spec_alias = None
def get_config_defaults(self):
ret = {
'USE_MIRROR': self.use_mirror
}
ret['CACHE_DIR'] = self.m.path['root'].join('git_cache')
return ret
def resolve_revision(self, revision):
if hasattr(revision, 'resolve'):
return revision.resolve(self.m.properties)
return revision
def sync(self, cfg, with_branch_heads=False, **kwargs):
revisions = []
for i, s in enumerate(cfg.solutions):
if s.safesync_url: # prefer safesync_url in gclient mode
continue
if i == 0 and s.revision is None:
s.revision = RevisionFallbackChain()
if s.revision is not None and s.revision != '':
fixed_revision = self.resolve_revision(s.revision)
if fixed_revision:
revisions.extend(['--revision', '%s@%s' % (s.name, fixed_revision)])
for name, revision in sorted(cfg.revisions.items()):
fixed_revision = self.resolve_revision(revision)
if fixed_revision:
revisions.extend(['--revision', '%s@%s' % (name, fixed_revision)])
test_data_paths = set(cfg.got_revision_mapping.keys() +
[s.name for s in cfg.solutions])
step_test_data = lambda: (
self.test_api.output_json(test_data_paths, cfg.GIT_MODE))
try:
if not cfg.GIT_MODE:
args = ['sync', '--nohooks', '--force', '--verbose']
if cfg.delete_unversioned_trees:
args.append('--delete_unversioned_trees')
if with_branch_heads:
args.append('--with_branch_heads')
self('sync', args + revisions + ['--output-json', self.m.json.output()],
step_test_data=step_test_data,
**kwargs)
else:
# clean() isn't used because the gclient sync flags passed in checkout()
# do much the same thing, and they're more correct than doing a separate
# 'gclient revert' because it makes sure the other args are correct when
# a repo was deleted and needs to be re-cloned (notably
# --with_branch_heads), whereas 'revert' uses default args for clone
# operations.
#
# TODO(mmoss): To be like current official builders, this step could
# just delete the whole <slave_name>/build/ directory and start each
# build from scratch. That might be the least bad solution, at least
# until we have a reliable gclient method to produce a pristine working
# dir for git-based builds (e.g. maybe some combination of 'git
# reset/clean -fx' and removing the 'out' directory).
j = '-j2' if self.m.platform.is_win else '-j8'
args = ['sync', '--verbose', '--with_branch_heads', '--nohooks', j,
'--reset', '--force', '--upstream', '--no-nag-max']
if cfg.delete_unversioned_trees:
args.append('--delete_unversioned_trees')
self('sync', args + revisions +
['--output-json', self.m.json.output()],
step_test_data=step_test_data,
**kwargs)
finally:
result = self.m.step.active_result
data = result.json.output
for path, info in data['solutions'].iteritems():
# gclient json paths always end with a slash
path = path.rstrip('/')
if path in cfg.got_revision_mapping:
propname = cfg.got_revision_mapping[path]
result.presentation.properties[propname] = info['revision']
return result
def inject_parent_got_revision(self, gclient_config=None, override=False):
"""Match gclient config to build revisions obtained from build_properties.
Args:
gclient_config (gclient config object) - The config to manipulate. A value
of None manipulates the module's built-in config (self.c).
override (bool) - If True, will forcibly set revision and custom_vars
even if the config already contains values for them.
"""
cfg = gclient_config or self.c
for prop, custom_var in cfg.parent_got_revision_mapping.iteritems():
val = str(self.m.properties.get(prop, ''))
# TODO(infra): Fix coverage.
if val: # pragma: no cover
# Special case for 'src', inject into solutions[0]
if custom_var is None:
# This is not covered because we are deprecating this feature and
# it is no longer used by the public recipes.
if cfg.solutions[0].revision is None or override: # pragma: no cover
cfg.solutions[0].revision = val
else:
if custom_var not in cfg.solutions[0].custom_vars or override:
cfg.solutions[0].custom_vars[custom_var] = val
def checkout(self, gclient_config=None, revert=RevertOnTryserver,
inject_parent_got_revision=True, with_branch_heads=False,
**kwargs):
"""Return a step generator function for gclient checkouts."""
cfg = gclient_config or self.c
assert cfg.complete()
if revert is self.RevertOnTryserver:
revert = self.m.tryserver.is_tryserver
if inject_parent_got_revision:
self.inject_parent_got_revision(cfg, override=True)
spec_string = jsonish_to_python(cfg.as_jsonish(), True)
self('setup', ['config', '--spec', spec_string], **kwargs)
sync_step = None
try:
if not cfg.GIT_MODE:
try:
if revert:
self.revert(**kwargs)
finally:
sync_step = self.sync(cfg, with_branch_heads=with_branch_heads,
**kwargs)
else:
sync_step = self.sync(cfg, with_branch_heads=with_branch_heads,
**kwargs)
cfg_cmds = [
('user.name', 'local_bot'),
('user.email', 'local_bot@example.com'),
]
for var, val in cfg_cmds:
name = 'recurse (git config %s)' % var
self(name, ['recurse', 'git', 'config', var, val], **kwargs)
finally:
cwd = kwargs.get('cwd', self.m.path['slave_build'])
if 'checkout' not in self.m.path:
self.m.path['checkout'] = cwd.join(
*cfg.solutions[0].name.split(self.m.path.sep))
return sync_step
def revert(self, **kwargs):
"""Return a gclient_safe_revert step."""
# Not directly calling gclient, so don't use self().
alias = self.spec_alias
prefix = '%sgclient ' % (('[spec: %s] ' % alias) if alias else '')
return self.m.python(prefix + 'revert',
self.m.path['build'].join('scripts', 'slave', 'gclient_safe_revert.py'),
['.', self.m.path['depot_tools'].join('gclient',
platform_ext={'win': '.bat'})],
infra_step=True,
**kwargs
)
def runhooks(self, args=None, name='runhooks', **kwargs):
args = args or []
assert isinstance(args, (list, tuple))
return self(
name, ['runhooks'] + list(args), infra_step=False, **kwargs)
@property
def is_blink_mode(self):
""" Indicates wether the caller is to use the Blink config rather than the
Chromium config. This may happen for one of two reasons:
1. The builder is configured to always use TOT Blink. (factory property
top_of_tree_blink=True)
2. A try job comes in that applies to the Blink tree. (patch_project is
blink)
"""
return (
self.m.properties.get('top_of_tree_blink') or
self.m.properties.get('patch_project') == 'blink')
def break_locks(self):
"""Remove all index.lock files. If a previous run of git crashed, bot was
reset, etc... we might end up with leftover index.lock files.
"""
self.m.python.inline(
'cleanup index.lock',
"""
import os, sys
build_path = sys.argv[1]
if os.path.exists(build_path):
for (path, dir, files) in os.walk(build_path):
for cur_file in files:
if cur_file.endswith('index.lock'):
path_to_file = os.path.join(path, cur_file)
print 'deleting %s' % path_to_file
os.remove(path_to_file)
""",
args=[self.m.path['slave_build']],
infra_step=True,
)
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import types
from recipe_engine.config import config_item_context, ConfigGroup, BadConf
from recipe_engine.config import ConfigList, Dict, Single, Static, Set, List
from . import api as gclient_api
def BaseConfig(USE_MIRROR=True, GIT_MODE=False, CACHE_DIR=None,
PATCH_PROJECT=None, BUILDSPEC_VERSION=None,
**_kwargs):
deps = '.DEPS.git' if GIT_MODE else 'DEPS'
cache_dir = str(CACHE_DIR) if GIT_MODE and CACHE_DIR else None
return ConfigGroup(
solutions = ConfigList(
lambda: ConfigGroup(
name = Single(basestring),
url = Single(basestring),
deps_file = Single(basestring, empty_val=deps, required=False,
hidden=False),
managed = Single(bool, empty_val=True, required=False, hidden=False),
custom_deps = Dict(value_type=(basestring, types.NoneType)),
custom_vars = Dict(value_type=basestring),
safesync_url = Single(basestring, required=False),
revision = Single(
(basestring, gclient_api.RevisionResolver),
required=False, hidden=True),
)
),
deps_os = Dict(value_type=basestring),
hooks = List(basestring),
target_os = Set(basestring),
target_os_only = Single(bool, empty_val=False, required=False),
cache_dir = Static(cache_dir, hidden=False),
# If supplied, use this as the source root (instead of the first solution's
# checkout).
src_root = Single(basestring, required=False, hidden=True),
# Maps 'solution' -> build_property
got_revision_mapping = Dict(hidden=True),
# Additional revisions we want to pass in. For now there's a duplication
# of code here of setting custom vars AND passing in --revision. We hope
# to remove custom vars later.
revisions = Dict(
value_type=(basestring, gclient_api.RevisionResolver),
hidden=True),
# TODO(iannucci): HACK! The use of None here to indicate that we apply this
# to the solution.revision field is really terrible. I mostly blame
# gclient.
# Maps 'parent_build_property' -> 'custom_var_name'
# Maps 'parent_build_property' -> None
# If value is None, the property value will be applied to
# solutions[0].revision. Otherwise, it will be applied to
# solutions[0].custom_vars['custom_var_name']
parent_got_revision_mapping = Dict(hidden=True),
delete_unversioned_trees = Single(bool, empty_val=True, required=False),
# Check out refs/branch-heads.
# TODO (machenbach): Only implemented for bot_update atm.
with_branch_heads = Single(
bool,
empty_val=False,
required=False,
hidden=True),
GIT_MODE = Static(bool(GIT_MODE)),
USE_MIRROR = Static(bool(USE_MIRROR)),
PATCH_PROJECT = Static(str(PATCH_PROJECT), hidden=True),
BUILDSPEC_VERSION= Static(BUILDSPEC_VERSION, hidden=True),
)
config_ctx = config_item_context(BaseConfig)
# TODO(phajdan.jr): Remove chromium-specific helper.
def ChromiumGitURL(_c, *pieces): # pragma: no cover
return '/'.join(('https://chromium.googlesource.com',) + pieces)
# TODO(phajdan.jr): Remove chromium-specific helper.
def ChromeInternalGitURL(_c, *pieces): # pragma: no cover
return '/'.join(('https://chrome-internal.googlesource.com',) + pieces)
[
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"config",
"--spec",
"cache_dir = '[ROOT]/git_cache'\nsolutions = [{'deps_file': '.DEPS.git', 'managed': True, 'name': 'src', 'url': 'https://chromium.googlesource.com/chromium/src.git'}]"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient setup"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"sync",
"--verbose",
"--with_branch_heads",
"--nohooks",
"-j8",
"--reset",
"--force",
"--upstream",
"--no-nag-max",
"--delete_unversioned_trees",
"--output-json",
"/path/to/tmp/json"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient sync",
"~followup_annotations": [
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"solutions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src/\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@"
]
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"recurse",
"git",
"config",
"user.name",
"local_bot"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient recurse (git config user.name)"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"recurse",
"git",
"config",
"user.email",
"local_bot@example.com"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient recurse (git config user.email)"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"config",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'WebKit', 'safesync_url': 'https://blink-status.appspot.com/lkgr', 'url': 'svn://svn.chromium.org/blink/trunk'}]"
],
"cwd": "[SLAVE_BUILD]/src/third_party",
"name": "[spec: WebKit] gclient setup"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"sync",
"--nohooks",
"--force",
"--verbose",
"--delete_unversioned_trees",
"--with_branch_heads",
"--revision",
"third_party/WebKit@123",
"--output-json",
"/path/to/tmp/json"
],
"cwd": "[SLAVE_BUILD]/src/third_party",
"name": "[spec: WebKit] gclient sync",
"~followup_annotations": [
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"solutions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"WebKit/\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"revision\": 241198@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"src/blatley/\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"revision\": 248087@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_blatley_revision@248087@@@"
]
},
{
"cmd": [
"python",
"-u",
"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n",
"[SLAVE_BUILD]"
],
"cwd": "[SLAVE_BUILD]",
"name": "cleanup index.lock",
"~followup_annotations": [
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@import os, sys@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@build_path = sys.argv[1]@@@",
"@@@STEP_LOG_LINE@python.inline@if os.path.exists(build_path):@@@",
"@@@STEP_LOG_LINE@python.inline@ for (path, dir, files) in os.walk(build_path):@@@",
"@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@",
"@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@",
"@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@",
"@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@",
"@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@",
"@@@STEP_LOG_END@python.inline@@@"
]
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"runhooks"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient runhooks"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"config",
"--spec",
"cache_dir = '[ROOT]/git_cache'\nsolutions = [{'deps_file': '.DEPS.git', 'managed': True, 'name': 'src', 'url': 'https://chromium.googlesource.com/chromium/src.git'}]"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient setup"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"sync",
"--verbose",
"--with_branch_heads",
"--nohooks",
"-j8",
"--reset",
"--force",
"--upstream",
"--no-nag-max",
"--delete_unversioned_trees",
"--revision",
"src@abc",
"--output-json",
"/path/to/tmp/json"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient sync",
"~followup_annotations": [
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"solutions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src/\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@"
]
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"recurse",
"git",
"config",
"user.name",
"local_bot"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient recurse (git config user.name)"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"recurse",
"git",
"config",
"user.email",
"local_bot@example.com"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient recurse (git config user.email)"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"config",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'WebKit', 'safesync_url': 'https://blink-status.appspot.com/lkgr', 'url': 'svn://svn.chromium.org/blink/trunk'}]"
],
"cwd": "[SLAVE_BUILD]/src/third_party",
"name": "[spec: WebKit] gclient setup"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"sync",
"--nohooks",
"--force",
"--verbose",
"--delete_unversioned_trees",
"--with_branch_heads",
"--revision",
"third_party/WebKit@123",
"--output-json",
"/path/to/tmp/json"
],
"cwd": "[SLAVE_BUILD]/src/third_party",
"name": "[spec: WebKit] gclient sync",
"~followup_annotations": [
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"solutions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"WebKit/\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"revision\": 241198@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"src/blatley/\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"revision\": 248087@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_blatley_revision@248087@@@"
]
},
{
"cmd": [
"python",
"-u",
"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n",
"[SLAVE_BUILD]"
],
"cwd": "[SLAVE_BUILD]",
"name": "cleanup index.lock",
"~followup_annotations": [
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@import os, sys@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@build_path = sys.argv[1]@@@",
"@@@STEP_LOG_LINE@python.inline@if os.path.exists(build_path):@@@",
"@@@STEP_LOG_LINE@python.inline@ for (path, dir, files) in os.walk(build_path):@@@",
"@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@",
"@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@",
"@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@",
"@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@",
"@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@",
"@@@STEP_LOG_END@python.inline@@@"
]
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"runhooks"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient runhooks"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"config",
"--spec",
"cache_dir = '[ROOT]/git_cache'\nsolutions = [{'deps_file': '.DEPS.git', 'managed': True, 'name': 'src', 'url': 'https://chromium.googlesource.com/chromium/src.git'}]"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient setup"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"sync",
"--verbose",
"--with_branch_heads",
"--nohooks",
"-j8",
"--reset",
"--force",
"--upstream",
"--no-nag-max",
"--delete_unversioned_trees",
"--revision",
"src@HEAD",
"--output-json",
"/path/to/tmp/json"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient sync",
"~followup_annotations": [
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"solutions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"src/\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"revision\": \"f27fede2220bcd326aee3e86ddfd4ebd0fe58cb9\"@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@"
]
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"recurse",
"git",
"config",
"user.name",
"local_bot"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient recurse (git config user.name)"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"recurse",
"git",
"config",
"user.email",
"local_bot@example.com"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient recurse (git config user.email)"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"config",
"--spec",
"cache_dir = None\nsolutions = [{'deps_file': 'DEPS', 'managed': True, 'name': 'WebKit', 'safesync_url': 'https://blink-status.appspot.com/lkgr', 'url': 'svn://svn.chromium.org/blink/trunk'}]"
],
"cwd": "[SLAVE_BUILD]/src/third_party",
"name": "[spec: WebKit] gclient setup"
},
{
"cmd": [
"python",
"-u",
"[BUILD]/scripts/slave/gclient_safe_revert.py",
".",
"[DEPOT_TOOLS]/gclient"
],
"cwd": "[SLAVE_BUILD]/src/third_party",
"name": "[spec: WebKit] gclient revert"
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"sync",
"--nohooks",
"--force",
"--verbose",
"--delete_unversioned_trees",
"--with_branch_heads",
"--revision",
"third_party/WebKit@123",
"--output-json",
"/path/to/tmp/json"
],
"cwd": "[SLAVE_BUILD]/src/third_party",
"name": "[spec: WebKit] gclient sync",
"~followup_annotations": [
"@@@STEP_LOG_LINE@json.output@{@@@",
"@@@STEP_LOG_LINE@json.output@ \"solutions\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"WebKit/\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"revision\": 241198@@@",
"@@@STEP_LOG_LINE@json.output@ }, @@@",
"@@@STEP_LOG_LINE@json.output@ \"src/blatley/\": {@@@",
"@@@STEP_LOG_LINE@json.output@ \"revision\": 248087@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@ }@@@",
"@@@STEP_LOG_LINE@json.output@}@@@",
"@@@STEP_LOG_END@json.output@@@",
"@@@SET_BUILD_PROPERTY@got_blatley_revision@248087@@@"
]
},
{
"cmd": [
"python",
"-u",
"\nimport os, sys\n\nbuild_path = sys.argv[1]\nif os.path.exists(build_path):\n for (path, dir, files) in os.walk(build_path):\n for cur_file in files:\n if cur_file.endswith('index.lock'):\n path_to_file = os.path.join(path, cur_file)\n print 'deleting %s' % path_to_file\n os.remove(path_to_file)\n",
"[SLAVE_BUILD]"
],
"cwd": "[SLAVE_BUILD]",
"name": "cleanup index.lock",
"~followup_annotations": [
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@import os, sys@@@",
"@@@STEP_LOG_LINE@python.inline@@@@",
"@@@STEP_LOG_LINE@python.inline@build_path = sys.argv[1]@@@",
"@@@STEP_LOG_LINE@python.inline@if os.path.exists(build_path):@@@",
"@@@STEP_LOG_LINE@python.inline@ for (path, dir, files) in os.walk(build_path):@@@",
"@@@STEP_LOG_LINE@python.inline@ for cur_file in files:@@@",
"@@@STEP_LOG_LINE@python.inline@ if cur_file.endswith('index.lock'):@@@",
"@@@STEP_LOG_LINE@python.inline@ path_to_file = os.path.join(path, cur_file)@@@",
"@@@STEP_LOG_LINE@python.inline@ print 'deleting %s' % path_to_file@@@",
"@@@STEP_LOG_LINE@python.inline@ os.remove(path_to_file)@@@",
"@@@STEP_LOG_END@python.inline@@@"
]
},
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/gclient.py",
"runhooks"
],
"cwd": "[SLAVE_BUILD]",
"name": "gclient runhooks"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'gclient',
'recipe_engine/path',
'recipe_engine/properties',
]
def RunSteps(api):
src_cfg = api.gclient.make_config(GIT_MODE=True)
soln = src_cfg.solutions.add()
soln.name = 'src'
soln.url = 'https://chromium.googlesource.com/chromium/src.git'
soln.revision = api.properties.get('revision')
src_cfg.parent_got_revision_mapping['parent_got_revision'] = 'got_revision'
api.gclient.c = src_cfg
api.gclient.checkout()
api.gclient.spec_alias = 'WebKit'
bl_cfg = api.gclient.make_config()
soln = bl_cfg.solutions.add()
soln.name = 'WebKit'
soln.url = 'svn://svn.chromium.org/blink/trunk'
bl_cfg.revisions['third_party/WebKit'] = '123'
# Use safesync url for lkgr.
soln.safesync_url = 'https://blink-status.appspot.com/lkgr'
bl_cfg.got_revision_mapping['src/blatley'] = 'got_blatley_revision'
api.gclient.checkout(
gclient_config=bl_cfg,
with_branch_heads=True,
cwd=api.path['slave_build'].join('src', 'third_party'))
api.gclient.break_locks()
del api.gclient.spec_alias
api.gclient.runhooks()
assert not api.gclient.is_blink_mode
def GenTests(api):
yield api.test('basic')
yield api.test('revision') + api.properties(revision='abc')
yield api.test('tryserver') + api.properties.tryserver()
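# Note (hedged): each test yielded above maps to one of the expectation files
# shown earlier; e.g. api.properties(revision='abc') produces the variant whose
# 'gclient sync' step adds '--revision src@abc'.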
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import hashlib
from recipe_engine import recipe_test_api
class GclientTestApi(recipe_test_api.RecipeTestApi):
def output_json(self, projects, git_mode=False):
"""Deterministically synthesize json.output test data for gclient's
--output-json option.
Args:
projects - a list of project paths (e.g. ['src', 'src/dependency'])
git_mode - Return git hashes instead of svn revs.
"""
# TODO(iannucci): Account for parent_got_revision_mapping. Right now the
# synthesized json output from this method will always use
# gen_revision(project), but if parent_got_revision and its ilk are
# specified, we should use those values instead.
return self.m.json.output({
'solutions': dict(
(p+'/', {'revision': self.gen_revision(p, git_mode)})
for p in projects
)
})
@staticmethod
def gen_revision(project, GIT_MODE):
"""Hash project to bogus deterministic revision values."""
h = hashlib.sha1(project)
if GIT_MODE:
return h.hexdigest()
else:
import struct
return struct.unpack('!I', h.digest()[:4])[0] % 300000
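# Hedged usage sketch (not part of the original module): a recipe test could
# feed this synthesized data to a sync step via step_data, for example:
#   yield (api.test('basic') +
#          api.step_data('gclient sync', api.gclient.output_json(['src'])))
# The step name and project list here are illustrative assumptions.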
DEPS = [
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
]
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import itertools
import re
from recipe_engine import recipe_api
class GitApi(recipe_api.RecipeApi):
_GIT_HASH_RE = re.compile('[0-9a-f]{40}', re.IGNORECASE)
def __call__(self, *args, **kwargs):
"""Return a git command step."""
name = kwargs.pop('name', 'git '+args[0])
infra_step = kwargs.pop('infra_step', True)
if 'cwd' not in kwargs:
kwargs.setdefault('cwd', self.m.path['checkout'])
git_cmd = ['git']
if self.m.platform.is_win:
git_cmd = [self.m.path['depot_tools'].join('git.bat')]
options = kwargs.pop('git_config_options', {})
for k, v in sorted(options.iteritems()):
git_cmd.extend(['-c', '%s=%s' % (k, v)])
can_fail_build = kwargs.pop('can_fail_build', True)
try:
return self.m.step(name, git_cmd + list(args), infra_step=infra_step,
**kwargs)
except self.m.step.StepFailure as f:
if can_fail_build:
raise
else:
return f.result
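# Illustrative usage (an assumption, not from the original source): arbitrary
# git subcommands go through __call__, e.g.
#   api.git('status', name='git status', can_fail_build=False)
# which runs `git status` in self.m.path['checkout'] unless `cwd` is passed.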
def fetch_tags(self, remote_name=None, **kwargs):
"""Fetches all tags from the remote."""
kwargs.setdefault('name', 'git fetch tags')
remote_name = remote_name or 'origin'
return self('fetch', remote_name, '--tags', **kwargs)
def cat_file_at_commit(self, file_path, commit_hash, remote_name=None,
**kwargs):
"""Outputs the contents of a file at a given revision."""
self.fetch_tags(remote_name=remote_name, **kwargs)
kwargs.setdefault('name', 'git cat-file %s:%s' % (commit_hash, file_path))
return self('cat-file', 'blob', '%s:%s' % (commit_hash, file_path),
**kwargs)
def count_objects(self, previous_result=None, can_fail_build=False, **kwargs):
"""Returns `git count-objects` result as a dict.
Args:
previous_result (dict): the result of a previous count_objects call.
If passed, delta is reported in the log and step text.
can_fail_build (bool): if True, may fail the build and/or raise an
exception. Defaults to False.
Returns:
A dict of count-objects values, or None if the count-objects run failed.
"""
if previous_result:
assert isinstance(previous_result, dict)
assert all(isinstance(v, long) for v in previous_result.itervalues())
assert 'size' in previous_result
assert 'size-pack' in previous_result
step_result = None
try:
step_result = self(
'count-objects', '-v', stdout=self.m.raw_io.output(),
can_fail_build=can_fail_build, **kwargs)
if not step_result.stdout:
return None
result = {}
for line in step_result.stdout.splitlines():
name, value = line.split(':', 1)
result[name] = long(value.strip())
def results_to_text(results):
return [' %s: %s' % (k, v) for k, v in results.iteritems()]
step_result.presentation.logs['result'] = results_to_text(result)
if previous_result:
delta = {
key: value - previous_result[key]
for key, value in result.iteritems()
if key in previous_result}
step_result.presentation.logs['delta'] = (
['before:'] + results_to_text(previous_result) +
['', 'after:'] + results_to_text(result) +
['', 'delta:'] + results_to_text(delta)
)
size_delta = (
result['size'] + result['size-pack']
- previous_result['size'] - previous_result['size-pack'])
# size_delta is in KiB.
step_result.presentation.step_text = (
'size delta: %+.2f MiB' % (size_delta / 1024.0))
return result
except Exception as ex:
if step_result:
step_result.presentation.logs['exception'] = ['%r' % ex]
step_result.presentation.status = self.m.step.WARNING
if can_fail_build:
raise recipe_api.InfraFailure('count-objects failed: %s' % ex)
return None
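# Illustrative sketch of the intended pairing (see checkout() below):
#   before = self.count_objects(name='count-objects before fetch')
#   # ... git fetch ...
#   self.count_objects(previous_result=before,
#                      name='count-objects after fetch')
# so the second call logs the per-key delta and reports the size change.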
def checkout(self, url, ref=None, dir_path=None, recursive=False,
submodules=True, submodule_update_force=False,
keep_paths=None, step_suffix=None,
curl_trace_file=None, can_fail_build=True,
set_got_revision=False, remote_name=None,
display_fetch_size=None, file_name=None,
submodule_update_recursive=True):
"""Returns an iterable of steps to perform a full git checkout.
Args:
url (str): url of remote repo to use as upstream
ref (str): ref to fetch and check out
dir_path (Path): optional directory to clone into
recursive (bool): whether to recursively fetch submodules or not
submodules (bool): whether to sync and update submodules or not
submodule_update_force (bool): whether to update submodules with --force
keep_paths (iterable of strings): paths to ignore during git-clean;
paths are gitignore-style patterns relative to checkout_path.
step_suffix (str): suffix to add to each step name
curl_trace_file (Path): if not None, dump GIT_CURL_VERBOSE=1 trace to that
file. Useful for debugging git issues reproducible only on bots. As a
side effect, all stderr output of 'git fetch' goes to that file.
can_fail_build (bool): if False, ignore errors during fetch or checkout.
set_got_revision (bool): if True, resolves HEAD and sets got_revision
property.
remote_name (str): name of the git remote to use
display_fetch_size (bool): if True, run `git count-objects` before and
after fetch and display delta. Adds two more steps. Defaults to False.
file_name (str): optional path to a single file to checkout.
submodule_update_recursive (bool): if True, updates submodules
recursively.
"""
# TODO(robertocn): Break this function and refactor calls to it.
# The problem is that there are way too many unrelated use cases for
# it, and the function's signature is getting unwieldy and its body
# unreadable.
display_fetch_size = display_fetch_size or False
if not dir_path:
dir_path = url.rsplit('/', 1)[-1]
if dir_path.endswith('.git'): # ex: https://host/foobar.git
dir_path = dir_path[:-len('.git')]
# ex: ssh://host:repo/foobar/.git
dir_path = dir_path or dir_path.rsplit('/', 1)[-1]
dir_path = self.m.path['slave_build'].join(dir_path)
if 'checkout' not in self.m.path:
self.m.path['checkout'] = dir_path
git_setup_args = ['--path', dir_path, '--url', url]
if remote_name:
git_setup_args += ['--remote', remote_name]
else:
remote_name = 'origin'
if self.m.platform.is_win:
git_setup_args += ['--git_cmd_path',
self.m.path['depot_tools'].join('git.bat')]
step_suffix = '' if step_suffix is None else ' (%s)' % step_suffix
self.m.python(
'git setup%s' % step_suffix,
self.resource('git_setup.py'),
git_setup_args)
# There are five kinds of refs we can be handed:
# 0) None. In this case, we default to properties['branch'].
# 1) A 40-character SHA1 hash.
# 2) A fully-qualified arbitrary ref, e.g. 'refs/foo/bar/baz'.
# 3) A fully qualified branch name, e.g. 'refs/heads/master'.
# Chop off 'refs/heads' and now it matches case (4).
# 4) A branch name, e.g. 'master'.
# Note that 'FETCH_HEAD' can be many things (and therefore not a valid
# checkout target) if many refs are fetched, but we only explicitly fetch
# one ref here, so this is safe.
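# A hedged summary of the mapping implemented below (example values only):
#   ref=None, branch='master' -> fetch 'origin master',        checkout FETCH_HEAD
#   ref=<40-char sha1>        -> fetch 'origin',               checkout <sha1>
#   ref='refs/heads/foo'      -> fetch 'origin foo',           checkout FETCH_HEAD
#   ref='refs/foo/bar'        -> fetch 'origin refs/foo/bar',  checkout FETCH_HEAD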
fetch_args = []
if not ref: # Case 0
fetch_remote = remote_name
fetch_ref = self.m.properties.get('branch') or 'master'
checkout_ref = 'FETCH_HEAD'
elif self._GIT_HASH_RE.match(ref): # Case 1.
fetch_remote = remote_name
fetch_ref = ''
checkout_ref = ref
elif ref.startswith('refs/heads/'): # Case 3.
fetch_remote = remote_name
fetch_ref = ref[len('refs/heads/'):]
checkout_ref = 'FETCH_HEAD'
else: # Cases 2 and 4.
fetch_remote = remote_name
fetch_ref = ref
checkout_ref = 'FETCH_HEAD'
fetch_args = [x for x in (fetch_remote, fetch_ref) if x]
if recursive:
fetch_args.append('--recurse-submodules')
fetch_env = {}
fetch_stderr = None
if curl_trace_file:
fetch_env['GIT_CURL_VERBOSE'] = '1'
fetch_stderr = self.m.raw_io.output(leak_to=curl_trace_file)
fetch_step_name = 'git fetch%s' % step_suffix
if display_fetch_size:
count_objects_before_fetch = self.count_objects(
name='count-objects before %s' % fetch_step_name,
cwd=dir_path,
step_test_data=lambda: self.m.raw_io.test_api.stream_output(
self.test_api.count_objects_output(1000)))
self('retry', 'fetch', *fetch_args,
cwd=dir_path,
name=fetch_step_name,
env=fetch_env,
stderr=fetch_stderr,
can_fail_build=can_fail_build)
if display_fetch_size:
self.count_objects(
name='count-objects after %s' % fetch_step_name,
cwd=dir_path,
previous_result=count_objects_before_fetch,
step_test_data=lambda: self.m.raw_io.test_api.stream_output(
self.test_api.count_objects_output(2000)))
if file_name:
self('checkout', '-f', checkout_ref, '--', file_name,
cwd=dir_path,
name='git checkout%s' % step_suffix,
can_fail_build=can_fail_build)
else:
self('checkout', '-f', checkout_ref,
cwd=dir_path,
name='git checkout%s' % step_suffix,
can_fail_build=can_fail_build)
if set_got_revision:
rev_parse_step = self('rev-parse', 'HEAD',
cwd=dir_path,
name='set got_revision',
stdout=self.m.raw_io.output(),
can_fail_build=False)
if rev_parse_step.presentation.status == 'SUCCESS':
sha = rev_parse_step.stdout.strip()
rev_parse_step.presentation.properties['got_revision'] = sha
clean_args = list(itertools.chain(
*[('-e', path) for path in keep_paths or []]))
self('clean', '-f', '-d', '-x', *clean_args,
name='git clean%s' % step_suffix,
cwd=dir_path,
can_fail_build=can_fail_build)
if submodules:
self('submodule', 'sync',
name='submodule sync%s' % step_suffix,
cwd=dir_path,
can_fail_build=can_fail_build)
submodule_update = ['submodule', 'update', '--init']
if submodule_update_recursive:
submodule_update.append('--recursive')
if submodule_update_force:
submodule_update.append('--force')
self(*submodule_update,
name='submodule update%s' % step_suffix,
cwd=dir_path,
can_fail_build=can_fail_build)
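# Hedged usage sketch (parameter values are assumptions): a typical call is
#   api.git.checkout('https://chromium.googlesource.com/chromium/src.git',
#                    ref='master', recursive=True, set_got_revision=True)
# which yields the setup/fetch/checkout/clean/submodule steps seen in the
# expectation files below.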
def get_timestamp(self, commit='HEAD', test_data=None, **kwargs):
"""Find and return the timestamp of the given commit."""
step_test_data = None
if test_data is not None:
step_test_data = lambda: self.m.raw_io.test_api.stream_output(test_data)
return self('show', commit, '--format=%at', '-s',
stdout=self.m.raw_io.output(),
step_test_data=step_test_data).stdout.rstrip()
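# Illustrative note (assumption): '%at' is the author date as a Unix timestamp,
# so the returned value is a raw epoch string, e.g.
#   api.git.get_timestamp(test_data='1473312000')  # -> '1473312000'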
def rebase(self, name_prefix, branch, dir_path, remote_name=None,
**kwargs):
"""Run rebase HEAD onto branch
Args:
name_prefix (str): a prefix used for the step names
branch (str): a branch name or a hash to rebase onto
dir_path (Path): directory in which to run the rebase
remote_name (str): the remote name to rebase from if not origin
"""
remote_name = remote_name or 'origin'
try:
self('rebase', '%s/master' % remote_name,
name="%s rebase" % name_prefix, cwd=dir_path, **kwargs)
except self.m.step.StepFailure:
self('rebase', '--abort', name='%s rebase abort' % name_prefix,
cwd=dir_path, **kwargs)
raise
def config_get(self, prop_name, **kwargs):
"""Returns: (str) The Git config output, or None if no output was generated.
Args:
prop_name: (str) The name of the config property to query.
kwargs: Forwarded to '__call__'.
"""
kwargs['name'] = kwargs.get('name', 'git config %s' % (prop_name,))
result = self('config', '--get', prop_name, stdout=self.m.raw_io.output(),
**kwargs)
value = result.stdout
if value:
value = value.strip()
result.presentation.step_text = value
return value
def get_remote_url(self, remote_name=None, **kwargs):
"""Returns: (str) The URL of the remote Git repository, or None.
Args:
remote_name: (str) The name of the remote to query, defaults to 'origin'.
kwargs: Forwarded to '__call__'.
"""
remote_name = remote_name or 'origin'
return self.config_get('remote.%s.url' % (remote_name,), **kwargs)
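# Illustrative usage (assumption): with the default remote this reduces to
#   api.git.get_remote_url()
# which shells out to `git config --get remote.origin.url` and returns its
# stripped stdout (cf. the 'git config remote.origin.url' expectation steps).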
def bundle_create(self, bundle_path, rev_list_args=None, **kwargs):
"""Run 'git bundle create' on a Git repository.
Args:
bundle_path (Path): The path of the output bundle.
rev_list_args (list): 'git rev-list' arguments selecting what to include in
the bundle. If None, all refs in the Git checkout will be bundled ('--all').
kwargs: Forwarded to '__call__'.
"""
if not rev_list_args:
rev_list_args = ['--all']
self('bundle', 'create', bundle_path, *rev_list_args, **kwargs)
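# Illustrative usage (assumption, mirroring the 'git bundle' expectation step):
#   api.git.bundle_create(api.path['slave_build'].join('all.bundle'))
# runs `git bundle create [SLAVE_BUILD]/all.bundle --all` in the checkout.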
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"testing",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD",
"--",
"DEPS"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"abcdef0123456789abcdef0123456789abcdef01"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"refs/foo/bar",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive",
"--force"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_EXCEPTION@@@"
]
},
{
"name": "$result",
"reason": "Infra Failure: Step('git status can_fail_build') returned 1",
"status_code": 1
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_EXCEPTION@@@"
]
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"abcdef12345",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags (2)",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"cat-file",
"blob",
"abcdef12345:TestFile"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git cat-file abcdef12345:TestFile",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects before git fetch",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_LOG_LINE@result@ count: 1000@@@",
"@@@STEP_LOG_LINE@result@ garbage: 1000@@@",
"@@@STEP_LOG_LINE@result@ packs: 1000@@@",
"@@@STEP_LOG_LINE@result@ in_pack: 1000@@@",
"@@@STEP_LOG_LINE@result@ size-pack: 1000@@@",
"@@@STEP_LOG_LINE@result@ size-garbage: 1000@@@",
"@@@STEP_LOG_LINE@result@ prune-packable: 1000@@@",
"@@@STEP_LOG_LINE@result@ size: 1000@@@",
"@@@STEP_LOG_END@result@@@"
]
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects after git fetch",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@size delta: +1.95 MiB@@@",
"@@@STEP_LOG_LINE@result@ count: 2000@@@",
"@@@STEP_LOG_LINE@result@ garbage: 2000@@@",
"@@@STEP_LOG_LINE@result@ packs: 2000@@@",
"@@@STEP_LOG_LINE@result@ in_pack: 2000@@@",
"@@@STEP_LOG_LINE@result@ size-pack: 2000@@@",
"@@@STEP_LOG_LINE@result@ size-garbage: 2000@@@",
"@@@STEP_LOG_LINE@result@ prune-packable: 2000@@@",
"@@@STEP_LOG_LINE@result@ size: 2000@@@",
"@@@STEP_LOG_END@result@@@",
"@@@STEP_LOG_LINE@delta@before:@@@",
"@@@STEP_LOG_LINE@delta@ count: 1000@@@",
"@@@STEP_LOG_LINE@delta@ garbage: 1000@@@",
"@@@STEP_LOG_LINE@delta@ packs: 1000@@@",
"@@@STEP_LOG_LINE@delta@ in_pack: 1000@@@",
"@@@STEP_LOG_LINE@delta@ size-pack: 1000@@@",
"@@@STEP_LOG_LINE@delta@ size-garbage: 1000@@@",
"@@@STEP_LOG_LINE@delta@ prune-packable: 1000@@@",
"@@@STEP_LOG_LINE@delta@ size: 1000@@@",
"@@@STEP_LOG_LINE@delta@@@@",
"@@@STEP_LOG_LINE@delta@after:@@@",
"@@@STEP_LOG_LINE@delta@ count: 2000@@@",
"@@@STEP_LOG_LINE@delta@ garbage: 2000@@@",
"@@@STEP_LOG_LINE@delta@ packs: 2000@@@",
"@@@STEP_LOG_LINE@delta@ in_pack: 2000@@@",
"@@@STEP_LOG_LINE@delta@ size-pack: 2000@@@",
"@@@STEP_LOG_LINE@delta@ size-garbage: 2000@@@",
"@@@STEP_LOG_LINE@delta@ prune-packable: 2000@@@",
"@@@STEP_LOG_LINE@delta@ size: 2000@@@",
"@@@STEP_LOG_LINE@delta@@@@",
"@@@STEP_LOG_LINE@delta@delta:@@@",
"@@@STEP_LOG_LINE@delta@ count: 1000@@@",
"@@@STEP_LOG_LINE@delta@ garbage: 1000@@@",
"@@@STEP_LOG_LINE@delta@ packs: 1000@@@",
"@@@STEP_LOG_LINE@delta@ prune-packable: 1000@@@",
"@@@STEP_LOG_LINE@delta@ size-pack: 1000@@@",
"@@@STEP_LOG_LINE@delta@ size-garbage: 1000@@@",
"@@@STEP_LOG_LINE@delta@ in_pack: 1000@@@",
"@@@STEP_LOG_LINE@delta@ size: 1000@@@",
"@@@STEP_LOG_END@delta@@@"
]
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_EXCEPTION@@@"
]
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_LOG_LINE@exception@ValueError(\"invalid literal for long() with base 10: 'xxx'\",)@@@",
"@@@STEP_LOG_END@exception@@@",
"@@@STEP_WARNINGS@@@"
]
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_LOG_LINE@exception@ValueError(\"invalid literal for long() with base 10: 'xxx'\",)@@@",
"@@@STEP_LOG_END@exception@@@",
"@@@STEP_WARNINGS@@@"
]
},
{
"name": "$result",
"reason": "count-objects failed: invalid literal for long() with base 10: 'xxx'",
"status_code": 1
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"refs/foo/bar",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"env": {
"GIT_CURL_VERBOSE": "1"
},
"name": "git fetch",
"stderr": "[SLAVE_BUILD]/curl_trace.log"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]\\resources\\git_setup.py",
"--path",
"[SLAVE_BUILD]\\src",
"--url",
"https://chromium.googlesource.com/chromium/src.git",
"--git_cmd_path",
"[DEPOT_TOOLS]\\git.bat"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "git fetch"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "git checkout"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "git clean"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "submodule sync"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "submodule update"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "git fetch tags"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"status"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "git status"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"status"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "git status can_fail_build"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"status"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "my repo rebase"
},
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"bundle",
"create",
"[SLAVE_BUILD]\\all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]\\src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_EXCEPTION@@@"
]
},
{
"cmd": [
"git",
"rebase",
"--abort"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase abort"
},
{
"name": "$result",
"reason": "Infra Failure: Step('my repo rebase') returned 1",
"status_code": 1
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git",
"--remote",
"not_origin"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"not_origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"not_origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"not_origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"RECIPE_MODULE[git]/resources/git_setup.py",
"--path",
"[SLAVE_BUILD]/src",
"--url",
"https://chromium.googlesource.com/chromium/src.git"
],
"cwd": "[SLAVE_BUILD]",
"name": "git setup"
},
{
"cmd": [
"git",
"retry",
"fetch",
"origin",
"master",
"--recurse-submodules"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git checkout"
},
{
"cmd": [
"git",
"rev-parse",
"HEAD"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "set got_revision",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@SET_BUILD_PROPERTY@got_revision@\"deadbeef\"@@@"
]
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git clean"
},
{
"cmd": [
"git",
"submodule",
"sync"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule sync"
},
{
"cmd": [
"git",
"submodule",
"update",
"--init",
"--recursive"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "submodule update"
},
{
"cmd": [
"git",
"-c",
"foo=bar",
"count-objects",
"-v"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "count-objects",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"config",
"--get",
"remote.origin.url"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git config remote.origin.url",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_TEXT@foo@@@"
]
},
{
"cmd": [
"git",
"show",
"HEAD",
"--format=%at",
"-s"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git show",
"stdout": "/path/to/tmp/"
},
{
"cmd": [
"git",
"fetch",
"origin",
"--tags"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git fetch tags"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status can_fail_build"
},
{
"cmd": [
"git",
"status"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git status cannot_fail_build"
},
{
"cmd": [
"git",
"rebase",
"origin/master"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "my repo rebase"
},
{
"cmd": [
"git",
"bundle",
"create",
"[SLAVE_BUILD]/all.bundle",
"--all"
],
"cwd": "[SLAVE_BUILD]/src",
"name": "git bundle"
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'git',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/raw_io',
'recipe_engine/step',
]
def RunSteps(api):
url = 'https://chromium.googlesource.com/chromium/src.git'
# git.checkout can optionally dump GIT_CURL_VERBOSE traces to a log file,
# useful for debugging git access issues that are reproducible only on bots.
curl_trace_file = None
if api.properties.get('use_curl_trace'):
curl_trace_file = api.path['slave_build'].join('curl_trace.log')
submodule_update_force = api.properties.get('submodule_update_force', False)
submodule_update_recursive = api.properties.get('submodule_update_recursive',
True)
# You can use api.git.checkout to perform all the steps of a safe checkout.
api.git.checkout(
url,
ref=api.properties.get('revision'),
recursive=True,
submodule_update_force=submodule_update_force,
set_got_revision=api.properties.get('set_got_revision'),
curl_trace_file=curl_trace_file,
remote_name=api.properties.get('remote_name'),
display_fetch_size=api.properties.get('display_fetch_size'),
file_name=api.properties.get('checkout_file_name'),
submodule_update_recursive=submodule_update_recursive)
# count_objects shows number and size of objects in .git dir.
api.git.count_objects(
name='count-objects',
can_fail_build=api.properties.get('count_objects_can_fail_build'),
git_config_options={'foo': 'bar'})
# Get the remote URL.
api.git.get_remote_url(
step_test_data=lambda: api.raw_io.test_api.stream_output('foo'))
api.git.get_timestamp(test_data='foo')
# You can use api.git.fetch_tags to fetch all tags from the remote
api.git.fetch_tags(api.properties.get('remote_name'))
# If you need to run more arbitrary git commands, you can use api.git itself,
# which behaves like api.step(), but automatically sets the name of the step.
api.git('status', cwd=api.path['checkout'])
api.git('status', name='git status can_fail_build',
can_fail_build=True)
api.git('status', name='git status cannot_fail_build',
can_fail_build=False)
# You can use api.git.rebase to rebase the current branch onto another one
api.git.rebase(name_prefix='my repo', branch='origin/master',
dir_path=api.path['checkout'],
remote_name=api.properties.get('remote_name'))
if api.properties.get('cat_file', None):
step_result = api.git.cat_file_at_commit(api.properties['cat_file'],
api.properties['revision'],
stdout=api.raw_io.output())
if 'TestOutput' in step_result.stdout:
pass # Success!
# Bundle the repository.
api.git.bundle_create(
api.path['slave_build'].join('all.bundle'))
def GenTests(api):
yield api.test('basic')
yield api.test('basic_ref') + api.properties(revision='refs/foo/bar')
yield api.test('basic_branch') + api.properties(revision='refs/heads/testing')
yield api.test('basic_hash') + api.properties(
revision='abcdef0123456789abcdef0123456789abcdef01')
yield api.test('basic_file_name') + api.properties(checkout_file_name='DEPS')
yield api.test('basic_submodule_update_force') + api.properties(
submodule_update_force=True)
yield api.test('platform_win') + api.platform.name('win')
yield api.test('curl_trace_file') + api.properties(
revision='refs/foo/bar', use_curl_trace=True)
yield (
api.test('can_fail_build') +
api.step_data('git status can_fail_build', retcode=1)
)
yield (
api.test('cannot_fail_build') +
api.step_data('git status cannot_fail_build', retcode=1)
)
yield (
api.test('set_got_revision') +
api.properties(set_got_revision=True) +
api.step_data('set got_revision',
stdout=api.raw_io.output('deadbeef'))
)
yield (
api.test('rebase_failed') +
api.step_data('my repo rebase', retcode=1)
)
yield api.test('remote_not_origin') + api.properties(remote_name='not_origin')
yield (
api.test('count-objects_delta') +
api.properties(display_fetch_size=True))
yield (
api.test('count-objects_failed') +
api.step_data('count-objects', retcode=1))
yield (
api.test('count-objects_with_bad_output') +
api.step_data(
'count-objects',
stdout=api.raw_io.output(api.git.count_objects_output('xxx'))))
yield (
api.test('count-objects_with_bad_output_fails_build') +
api.step_data(
'count-objects',
stdout=api.raw_io.output(api.git.count_objects_output('xxx'))) +
api.properties(count_objects_can_fail_build=True))
yield (
api.test('cat-file_test') +
api.step_data('git cat-file abcdef12345:TestFile',
stdout=api.raw_io.output('TestOutput')) +
api.properties(revision='abcdef12345', cat_file='TestFile'))
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script ensures that a given directory is an initialized git repo."""
import argparse
import logging
import os
import subprocess
import sys
def run_git(git_cmd, *args, **kwargs):
"""Runs git with given arguments.
kwargs are passed through to subprocess.
If the kwarg 'throw' is True (the default), this behaves like check_call;
otherwise git's return code is returned.
"""
logging.info('Running: %s %s %s', git_cmd, args, kwargs)
func = subprocess.check_call if kwargs.pop('throw', True) else subprocess.call
return func((git_cmd,)+args, **kwargs)
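# Illustrative usage (hypothetical paths; not exercised by main() below):
#   run_git('git', 'status', cwd='/some/repo')                   # raises on failure
#   rc = run_git('git', 'fetch', cwd='/some/repo', throw=False)  # returns exit code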
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--path', help='Path to prospective git repo.',
required=True)
parser.add_argument('--url', help='URL of remote to make origin.',
required=True)
parser.add_argument('--git_cmd_path',
help='Path to the git command to run.',
default='git')
parser.add_argument('--remote', help='Name of the git remote.',
default='origin')
parser.add_argument('-v', '--verbose', action='store_true')
opts = parser.parse_args()
path = opts.path
remote = opts.remote
url = opts.url
logging.getLogger().setLevel(logging.DEBUG if opts.verbose else logging.WARN)
if not os.path.exists(path):
os.makedirs(path)
if os.path.exists(os.path.join(path, '.git')):
run_git(opts.git_cmd_path, 'config', '--remove-section',
'remote.%s' % remote, cwd=path)
else:
run_git(opts.git_cmd_path, 'init', cwd=path)
run_git(opts.git_cmd_path, 'remote', 'add', remote, url, cwd=path)
return 0
if __name__ == '__main__':
sys.exit(main())
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from recipe_engine import recipe_test_api
class GitTestApi(recipe_test_api.RecipeTestApi):
def count_objects_output(self, value):
return (
'count: %s\n'
'size: %s\n'
'in_pack: %s\n'
'packs: %s\n'
'size-pack: %s\n'
'prune-packable: %s\n'
'garbage: %s\n'
'size-garbage: %s\n'
) % tuple([value] * 8)
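# For example, count_objects_output('10') yields eight 'key: 10' lines
# mimicking 'git count-objects -v' output; the count-objects_with_bad_output
# tests above feed it in as fake step stdout.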
DEPS = [
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/python',
]
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import urlparse
from recipe_engine import recipe_api
class RietveldApi(recipe_api.RecipeApi):
def calculate_issue_root(self, extra_patch_project_roots=None):
"""Returns path where a patch should be applied to based on "patch_project".
Maps Rietveld's "patch_project" to a path of directories relative to
api.gclient.c.solutions[0].name which describe where to place the patch.
Args:
extra_patch_project_roots: Dict mapping project names to relative roots.
Returns:
Relative path, or an empty string if patch_project is not set or the path
for the given project is unknown.
"""
# Property 'patch_project' is set by Rietveld, 'project' is set by git-try
# when TRYSERVER_PROJECT is present in codereview.settings.
patch_project = (self.m.properties.get('patch_project') or
self.m.properties.get('project'))
# Please avoid adding projects into this hard-coded list unless your project
# CLs are being run by multiple recipes. Instead pass patch_project_roots to
# ensure_checkout.
patch_project_roots = {
'angle/angle': ['third_party', 'angle'],
'blink': ['third_party', 'WebKit'],
'v8': ['v8'],
'luci-py': ['luci'],
'recipes-py': ['recipes-py'],
}
# Make sure to update common projects (above) with extra projects (and not
# vice versa), so that recipes can override default values if needed.
if extra_patch_project_roots:
patch_project_roots.update(extra_patch_project_roots)
path_parts = patch_project_roots.get(patch_project)
return self.m.path.join(*path_parts) if path_parts else ''
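# For illustration, with the default mapping above (joined via api.path.join,
# so the separator is platform-dependent):
#   patch_project='v8'          -> 'v8'
#   patch_project='angle/angle' -> 'third_party/angle'
#   patch_project=None          -> ''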
def apply_issue(self, *root_pieces, **kwargs):
"""Call apply_issue from depot_tools.
Args:
root_pieces (strings): location where to run apply_issue, relative to the
checkout root.
authentication (string or None): authentication scheme to use. Can be None
or 'oauth2'. See also apply_issue.py --help (-E and --no-auth options.)
"""
# TODO(pgervais): replace *root_pieces by a single Path object.
authentication = kwargs.get('authentication', None)
rietveld_url = self.m.properties['rietveld']
issue_number = self.m.properties['issue']
if authentication == 'oauth2':
step_result = self.m.python(
'apply_issue',
self.m.path['depot_tools'].join('apply_issue.py'), [
'-r', self.m.path['checkout'].join(*root_pieces),
'-i', issue_number,
'-p', self.m.properties['patchset'],
'-s', rietveld_url,
'-E', self.m.path['build'].join('site_config',
'.rietveld_client_email'),
'-k', self.m.path['build'].join('site_config',
'.rietveld_secret_key')
],
)
else:
step_result = self.m.python(
'apply_issue',
self.m.path['depot_tools'].join('apply_issue.py'), [
'-r', self.m.path['checkout'].join(*root_pieces),
'-i', issue_number,
'-p', self.m.properties['patchset'],
'-s', rietveld_url,
'--no-auth'],
)
step_result.presentation.links['Applied issue %s' % issue_number] = (
urlparse.urljoin(rietveld_url, str(issue_number)))
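# The two branches above produce the 'apply_issue' steps seen in the
# expectation files in this module: the oauth2 form passes the -E/-k
# credential paths, the default form passes --no-auth.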
[
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/apply_issue.py",
"-r",
"[SLAVE_BUILD]/foo/bar",
"-i",
"1",
"-p",
"1",
"-s",
"http://review_tool.url",
"-E",
"[BUILD]/site_config/.rietveld_client_email",
"-k",
"[BUILD]/site_config/.rietveld_secret_key"
],
"cwd": "[SLAVE_BUILD]",
"name": "apply_issue",
"~followup_annotations": [
"@@@STEP_LINK@Applied issue 1@http://review_tool.url/1@@@"
]
},
{
"name": "$result",
"recipe_result": null,
"status_code": 0
}
]
\ No newline at end of file
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'recipe_engine/path',
'recipe_engine/properties',
'recipe_engine/step',
'rietveld',
]
def RunSteps(api):
api.path['checkout'] = api.path['slave_build']
api.rietveld.apply_issue('foo', 'bar', authentication='oauth2')
api.rietveld.calculate_issue_root({'project': ['']})
def GenTests(api):
yield (api.test('basic')
+ api.properties(issue=1,
patchset=1,
rietveld='http://review_tool.url')
)
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'git',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
'recipe_engine/raw_io',
'rietveld',
'recipe_engine/step',
]
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import contextlib
import hashlib
from recipe_engine import recipe_api
PATCH_STORAGE_RIETVELD = 'rietveld'
PATCH_STORAGE_GIT = 'git'
PATCH_STORAGE_SVN = 'svn'
class TryserverApi(recipe_api.RecipeApi):
def __init__(self, *args, **kwargs):
super(TryserverApi, self).__init__(*args, **kwargs)
self._failure_reasons = []
@property
def patch_url(self):
"""Reads patch_url property and corrects it if needed."""
url = self.m.properties.get('patch_url')
return url
@property
def is_tryserver(self):
"""Returns true iff we can apply_issue or patch."""
return (self.can_apply_issue or self.is_patch_in_svn or
self.is_patch_in_git or self.is_gerrit_issue)
@property
def can_apply_issue(self):
"""Returns true iff the properties exist to apply_issue from rietveld."""
return (self.m.properties.get('rietveld')
and 'issue' in self.m.properties
and 'patchset' in self.m.properties)
@property
def is_gerrit_issue(self):
"""Returns true iff the properties exist to match a Gerrit issue."""
return ('event.patchSet.ref' in self.m.properties and
'event.change.url' in self.m.properties and
'event.change.id' in self.m.properties)
@property
def is_patch_in_svn(self):
"""Returns true iff the properties exist to patch from a patch URL."""
return self.patch_url
@property
def is_patch_in_git(self):
return (self.m.properties.get('patch_storage') == PATCH_STORAGE_GIT and
self.m.properties.get('patch_repo_url') and
self.m.properties.get('patch_ref'))
def _apply_patch_step(self, patch_file=None, patch_content=None, root=None):
assert not (patch_file and patch_content), (
'Please only specify either patch_file or patch_content, not both!')
patch_cmd = [
'patch',
'--dir', root or self.m.path['checkout'],
'--force',
'--forward',
'--remove-empty-files',
'--strip', '0',
]
if patch_file:
patch_cmd.extend(['--input', patch_file])
self.m.step('apply patch', patch_cmd,
stdin=patch_content)
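# Roughly, the composed step runs (placeholders for root and input):
#   patch --dir <root> --force --forward --remove-empty-files --strip 0 \
#         [--input <patch_file>]
# with patch_content, when given, fed to the step via stdin instead.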
def apply_from_svn(self, cwd):
"""Downloads patch from patch_url using svn-export and applies it"""
# TODO(nodir): accept these properties as parameters
patch_url = self.patch_url
root = cwd
if root is None:
issue_root = self.m.rietveld.calculate_issue_root()
root = self.m.path['checkout'].join(issue_root)
patch_file = self.m.raw_io.output('.diff')
ext = '.bat' if self.m.platform.is_win else ''
svn_cmd = ['svn' + ext, 'export', '--force', patch_url, patch_file]
result = self.m.step('download patch', svn_cmd,
step_test_data=self.test_api.patch_content)
result.presentation.logs['patch.diff'] = (
result.raw_io.output.split('\n'))
patch_content = self.m.raw_io.input(result.raw_io.output)
self._apply_patch_step(patch_content=patch_content, root=root)
def apply_from_git(self, cwd):
"""Downloads patch from given git repo and ref and applies it"""
# TODO(nodir): accept these properties as parameters
patch_repo_url = self.m.properties['patch_repo_url']
patch_ref = self.m.properties['patch_ref']
patch_dir = self.m.path.mkdtemp('patch')
git_setup_py = self.m.path['build'].join('scripts', 'slave', 'git_setup.py')
git_setup_args = ['--path', patch_dir, '--url', patch_repo_url]
patch_path = patch_dir.join('patch.diff')
self.m.python('patch git setup', git_setup_py, git_setup_args)
self.m.git('fetch', 'origin', patch_ref,
name='patch fetch', cwd=patch_dir)
self.m.git('clean', '-f', '-d', '-x',
name='patch clean', cwd=patch_dir)
self.m.git('checkout', '-f', 'FETCH_HEAD',
name='patch git checkout', cwd=patch_dir)
self._apply_patch_step(patch_file=patch_path, root=cwd)
self.m.step('remove patch', ['rm', '-rf', patch_dir])
def determine_patch_storage(self):
"""Determines patch_storage automatically based on properties."""
storage = self.m.properties.get('patch_storage')
if storage:
return storage
if self.can_apply_issue:
return PATCH_STORAGE_RIETVELD
elif self.is_patch_in_svn:
return PATCH_STORAGE_SVN
def maybe_apply_issue(self, cwd=None, authentication=None):
"""If we're a trybot, apply a codereview issue.
Args:
cwd: If specified, apply the patch from the specified directory.
authentication: authentication scheme to use when apply_issue.py is called.
This is only used if the patch comes from Rietveld. Possible values:
None, 'oauth2' (see also api.rietveld.apply_issue.)
"""
storage = self.determine_patch_storage()
if storage == PATCH_STORAGE_RIETVELD:
return self.m.rietveld.apply_issue(
self.m.rietveld.calculate_issue_root(),
authentication=authentication)
elif storage == PATCH_STORAGE_SVN:
return self.apply_from_svn(cwd)
elif storage == PATCH_STORAGE_GIT:
return self.apply_from_git(cwd)
else:
# Since this method is "maybe", we don't raise an Exception.
pass
def get_files_affected_by_patch(self):
git_diff_kwargs = {}
issue_root = self.m.rietveld.calculate_issue_root()
if issue_root:
git_diff_kwargs['cwd'] = self.m.path['checkout'].join(issue_root)
step_result = self.m.git('diff', '--cached', '--name-only',
name='git diff to analyze patch',
stdout=self.m.raw_io.output(),
step_test_data=lambda:
self.m.raw_io.test_api.stream_output('foo.cc'),
**git_diff_kwargs)
paths = step_result.stdout.split()
if issue_root:
paths = [self.m.path.join(issue_root, path) for path in paths]
if self.m.platform.is_win:
# Looks like "analyze" wants POSIX slashes even on Windows (since git
# uses that format even on Windows).
paths = [path.replace('\\', '/') for path in paths]
step_result.presentation.logs['files'] = paths
return paths
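# For example, with patch_project='v8' a stdout of 'foo.cc' becomes
# ['v8/foo.cc'], as the with_git_patch expectation below shows.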
def set_subproject_tag(self, subproject_tag):
"""Adds a subproject tag to the build.
This can be used to distinguish between builds that execute different steps
depending on what was patched, e.g. blink vs. pure chromium patches.
"""
assert self.is_tryserver
step_result = self.m.step.active_result
step_result.presentation.properties['subproject_tag'] = subproject_tag
def _set_failure_type(self, failure_type):
if not self.is_tryserver:
return
step_result = self.m.step.active_result
step_result.presentation.properties['failure_type'] = failure_type
def set_patch_failure_tryjob_result(self):
"""Mark the tryjob result as failure to apply the patch."""
self._set_failure_type('PATCH_FAILURE')
def set_compile_failure_tryjob_result(self):
"""Mark the tryjob result as a compile failure."""
self._set_failure_type('COMPILE_FAILURE')
def set_test_failure_tryjob_result(self):
"""Mark the tryjob result as a test failure.
This means we started running actual tests (not prerequisite steps
like checkout or compile), and some of these tests have failed.
"""
self._set_failure_type('TEST_FAILURE')
def set_invalid_test_results_tryjob_result(self):
"""Mark the tryjob result as having invalid test results.
This means we ran some tests, but the results were not valid
(e.g. no list of specific test cases that failed, too many tests
failing, etc.).
"""
self._set_failure_type('INVALID_TEST_RESULTS')
def add_failure_reason(self, reason):
"""
Records a more detailed reason why the build is failing.
The reason can be any JSON-serializable object.
"""
assert self.m.json.is_serializable(reason)
self._failure_reasons.append(reason)
@contextlib.contextmanager
def set_failure_hash(self):
"""
Context manager that sets a failure_hash build property on StepFailure.
This can be used to easily compare whether two builds have failed
for the same reason. For example, if a patch is bad (breaks something),
we'd expect it to always break in the same way. Different failures
for the same patch are usually a sign of flakiness.
"""
try:
yield
except self.m.step.StepFailure as e:
self.add_failure_reason(e.reason)
failure_hash = hashlib.sha1()
failure_hash.update(self.m.json.dumps(self._failure_reasons))
step_result = self.m.step.active_result
step_result.presentation.properties['failure_hash'] = \
failure_hash.hexdigest()
raise
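# Typical use in a recipe (sketch; mirrors the example module below):
#   with api.tryserver.set_failure_hash():
#     run_tests()  # any StepFailure raised in here also sets 'failure_hash'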
[
{
"cmd": [
"python",
"-u",
"[BUILD]/scripts/slave/git_setup.py",
"--path",
"[TMP_BASE]/patch_tmp_1",
"--url",
"http://patch.url/"
],
"cwd": "[SLAVE_BUILD]",
"name": "patch git setup"
},
{
"cmd": [
"git",
"fetch",
"origin",
"johndoe#123.diff"
],
"cwd": "[TMP_BASE]/patch_tmp_1",
"name": "patch fetch"
},
{
"cmd": [
"git",
"clean",
"-f",
"-d",
"-x"
],
"cwd": "[TMP_BASE]/patch_tmp_1",
"name": "patch clean"
},
{
"cmd": [
"git",
"checkout",
"-f",
"FETCH_HEAD"
],
"cwd": "[TMP_BASE]/patch_tmp_1",
"name": "patch git checkout"
},
{
"cmd": [
"patch",
"--dir",
"[SLAVE_BUILD]",
"--force",
"--forward",
"--remove-empty-files",
"--strip",
"0",
"--input",
"[TMP_BASE]/patch_tmp_1/patch.diff"
],
"cwd": "[SLAVE_BUILD]",
"name": "apply patch"
},
{
"cmd": [
"rm",
"-rf",
"[TMP_BASE]/patch_tmp_1"
],
"cwd": "[SLAVE_BUILD]",
"name": "remove patch"
},
{
"cmd": [
"git",
"diff",
"--cached",
"--name-only"
],
"cwd": "[SLAVE_BUILD]/v8",
"name": "git diff to analyze patch",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_LOG_LINE@files@v8/foo.cc@@@",
"@@@STEP_LOG_END@files@@@",
"@@@SET_BUILD_PROPERTY@failure_type@\"INVALID_TEST_RESULTS\"@@@",
"@@@SET_BUILD_PROPERTY@subproject_tag@\"v8\"@@@"
]
},
{
"cmd": [
"python",
"-u",
"import sys; sys.exit(1)"
],
"cwd": "[SLAVE_BUILD]",
"name": "fail",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_TEXT@foo@@@",
"@@@STEP_FAILURE@@@",
"@@@SET_BUILD_PROPERTY@failure_hash@\"c2311ad770732eade3d2f48247abd147e40a70e7\"@@@"
]
},
{
"name": "$result",
"reason": "Step('fail') failed with return_code 1",
"status_code": 1
}
]
\ No newline at end of file
[
{
"cmd": [
"python",
"-u",
"[DEPOT_TOOLS]/apply_issue.py",
"-r",
"[SLAVE_BUILD]",
"-i",
"12853011",
"-p",
"1",
"-s",
"https://codereview.chromium.org",
"--no-auth"
],
"cwd": "[SLAVE_BUILD]",
"name": "apply_issue",
"~followup_annotations": [
"@@@STEP_LINK@Applied issue 12853011@https://codereview.chromium.org/12853011@@@"
]
},
{
"cmd": [
"git",
"diff",
"--cached",
"--name-only"
],
"cwd": "[SLAVE_BUILD]",
"name": "git diff to analyze patch",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_LOG_LINE@files@foo.cc@@@",
"@@@STEP_LOG_END@files@@@",
"@@@SET_BUILD_PROPERTY@failure_type@\"INVALID_TEST_RESULTS\"@@@",
"@@@SET_BUILD_PROPERTY@subproject_tag@\"v8\"@@@"
]
},
{
"cmd": [
"python",
"-u",
"import sys; sys.exit(1)"
],
"cwd": "[SLAVE_BUILD]",
"name": "fail",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_TEXT@foo@@@",
"@@@STEP_FAILURE@@@",
"@@@SET_BUILD_PROPERTY@failure_hash@\"c2311ad770732eade3d2f48247abd147e40a70e7\"@@@"
]
},
{
"name": "$result",
"reason": "Step('fail') failed with return_code 1",
"status_code": 1
}
]
\ No newline at end of file
[
{
"cmd": [
"svn",
"export",
"--force",
"svn://checkout.url",
"/path/to/tmp/diff"
],
"cwd": "[SLAVE_BUILD]",
"name": "download patch",
"~followup_annotations": [
"@@@STEP_LOG_LINE@patch.diff@fake patch.diff content (line 1)@@@",
"@@@STEP_LOG_LINE@patch.diff@fake patch.diff content (line 2)@@@",
"@@@STEP_LOG_LINE@patch.diff@@@@",
"@@@STEP_LOG_END@patch.diff@@@"
]
},
{
"cmd": [
"patch",
"--dir",
"[SLAVE_BUILD]",
"--force",
"--forward",
"--remove-empty-files",
"--strip",
"0"
],
"cwd": "[SLAVE_BUILD]",
"name": "apply patch",
"stdin": "fake patch.diff content (line 1)\nfake patch.diff content (line 2)\n"
},
{
"cmd": [
"git",
"diff",
"--cached",
"--name-only"
],
"cwd": "[SLAVE_BUILD]",
"name": "git diff to analyze patch",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_LOG_LINE@files@foo.cc@@@",
"@@@STEP_LOG_END@files@@@",
"@@@SET_BUILD_PROPERTY@failure_type@\"INVALID_TEST_RESULTS\"@@@",
"@@@SET_BUILD_PROPERTY@subproject_tag@\"v8\"@@@"
]
},
{
"cmd": [
"python",
"-u",
"import sys; sys.exit(1)"
],
"cwd": "[SLAVE_BUILD]",
"name": "fail",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_TEXT@foo@@@",
"@@@STEP_FAILURE@@@",
"@@@SET_BUILD_PROPERTY@failure_hash@\"c2311ad770732eade3d2f48247abd147e40a70e7\"@@@"
]
},
{
"name": "$result",
"reason": "Step('fail') failed with return_code 1",
"status_code": 1
}
]
\ No newline at end of file
[
{
"cmd": [
"[DEPOT_TOOLS]\\git.bat",
"diff",
"--cached",
"--name-only"
],
"cwd": "[SLAVE_BUILD]",
"name": "git diff to analyze patch",
"stdout": "/path/to/tmp/",
"~followup_annotations": [
"@@@STEP_LOG_LINE@files@foo.cc@@@",
"@@@STEP_LOG_END@files@@@"
]
},
{
"cmd": [
"python",
"-u",
"import sys; sys.exit(1)"
],
"cwd": "[SLAVE_BUILD]",
"name": "fail",
"~followup_annotations": [
"step returned non-zero exit code: 1",
"@@@STEP_TEXT@foo@@@",
"@@@STEP_FAILURE@@@",
"@@@SET_BUILD_PROPERTY@failure_hash@\"c2311ad770732eade3d2f48247abd147e40a70e7\"@@@"
]
},
{
"name": "$result",
"reason": "Step('fail') failed with return_code 1",
"status_code": 1
}
]
\ No newline at end of file
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/properties',
'recipe_engine/python',
'tryserver',
]
def RunSteps(api):
api.path['checkout'] = api.path['slave_build']
api.tryserver.maybe_apply_issue()
api.tryserver.get_files_affected_by_patch()
if api.tryserver.is_tryserver:
api.tryserver.set_subproject_tag('v8')
api.tryserver.set_patch_failure_tryjob_result()
api.tryserver.set_compile_failure_tryjob_result()
api.tryserver.set_test_failure_tryjob_result()
api.tryserver.set_invalid_test_results_tryjob_result()
with api.tryserver.set_failure_hash():
api.python.failing_step('fail', 'foo')
def GenTests(api):
yield (api.test('with_svn_patch') +
api.properties(patch_url='svn://checkout.url'))
yield (api.test('with_git_patch') +
api.properties(
patch_storage='git',
patch_project='v8',
patch_repo_url='http://patch.url/',
patch_ref='johndoe#123.diff'))
yield (api.test('with_rietveld_patch') +
api.properties.tryserver())
yield (api.test('with_wrong_patch') + api.platform('win', 32))
from recipe_engine import recipe_test_api
class TryserverTestApi(recipe_test_api.RecipeTestApi):
def patch_content(self):
return self.m.raw_io.output(
'fake patch.diff content (line 1)\n'
'fake patch.diff content (line 2)\n')
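# apply_from_svn() consumes this via step_test_data on its 'download patch'
# step; the resulting stdin is visible in the 'apply patch' step of the
# with_svn_patch expectation above.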