Commit d08e8691 authored by Michael Achenbach, committed by Commit Bot

[build] Port latest MB from Chromium

This ports the MB script from Chromium until revision:
https://crrev.com/66958462e684149bcb220000868e8247096e435b

The main difference is the removed gyp support.
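
A minimal sketch of what the removal amounts to in mb.py (illustrative only;
the actual code is in the diff below): the gen/analyze entry points no longer
branch on a meta-build 'type' or clobber mismatched build directories, and
instead dispatch straight to GN:

  def CmdGen(self):
    vals = self.Lookup()
    return self.RunGNGen(vals)

  def CmdAnalyze(self):
    vals = self.Lookup()
    return self.RunGNAnalyze(vals)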

This reapplies V8-side changes from:
https://crrev.com/41d9e8571419acd3547c9810c55c5516cc4dee79
https://crrev.com/1cd6fd9ff8e88bffa7cbec7131b85ff086dc128c

Bug: chromium:772804
Change-Id: I530e01df0c2343e961519ac27b1e58a01bb63743
Reviewed-on: https://chromium-review.googlesource.com/911073
Reviewed-by: Sergiy Byelozyorov <sergiyb@chromium.org>
Commit-Queue: Michael Achenbach <machenbach@chromium.org>
Cr-Commit-Position: refs/heads/master@{#51215}
parent 72006803
@@ -4,16 +4,12 @@
 # Use of this source code is governed by a BSD-style license that can be
 # found in the LICENSE file.
-"""MB - the Meta-Build wrapper around GYP and GN
+"""MB - the Meta-Build wrapper around GN.
-MB is a wrapper script for GYP and GN that can be used to generate build files
+MB is a wrapper script for GN that can be used to generate build files
 for sets of canned configurations and analyze them.
 """
-# TODO(thomasanderson): Remove this comment. It is added to
-# workaround https://crbug.com/736215 for CL
-# https://codereview.chromium.org/2974603002/
 from __future__ import print_function
 import argparse
@@ -22,6 +18,7 @@ import errno
 import json
 import os
 import pipes
+import platform
 import pprint
 import re
 import shutil
@@ -95,21 +92,19 @@ class MetaBuildWrapper(object):
                       help='path to config file '
                            '(default is %(default)s)')
     subp.add_argument('-i', '--isolate-map-file', metavar='PATH',
-                      default=self.default_isolate_map,
                       help='path to isolate map file '
-                           '(default is %(default)s)')
+                           '(default is %(default)s)',
+                      default=[],
+                      action='append',
+                      dest='isolate_map_files')
     subp.add_argument('-g', '--goma-dir',
                       help='path to goma directory')
-    subp.add_argument('--gyp-script', metavar='PATH',
-                      default=self.PathJoin('build', 'gyp_chromium'),
-                      help='path to gyp script relative to project root '
-                           '(default is %(default)s)')
+    # TODO(machenbach): Remove after removing on infra-side.
+    subp.add_argument('--gyp-script', help='Deprecated')
     subp.add_argument('--android-version-code',
-                      help='Sets GN arg android_default_version_code and '
-                           'GYP_DEFINE app_manifest_version_code')
+                      help='Sets GN arg android_default_version_code')
     subp.add_argument('--android-version-name',
-                      help='Sets GN arg android_default_version_name and '
-                           'GYP_DEFINE app_manifest_version_name')
+                      help='Sets GN arg android_default_version_name')
     subp.add_argument('-n', '--dryrun', action='store_true',
                       help='Do a dry run (i.e., do nothing, just print '
                            'the commands that will run)')
@@ -190,7 +185,6 @@ class MetaBuildWrapper(object):
              ' --test-launcher-retry-limit=0'
              '\n'
     )
     AddCommonOptions(subp)
     subp.add_argument('-j', '--jobs', dest='jobs', type=int,
                       help='Number of jobs to pass to ninja')
@@ -202,6 +196,14 @@ class MetaBuildWrapper(object):
                             ' This can be either a regular path or a '
                             'GN-style source-relative path like '
                             '//out/Default.'))
+    subp.add_argument('-s', '--swarmed', action='store_true',
+                      help='Run under swarming with the default dimensions')
+    subp.add_argument('-d', '--dimension', default=[], action='append', nargs=2,
+                      dest='dimensions', metavar='FOO bar',
+                      help='dimension to filter on')
+    subp.add_argument('--no-default-dimensions', action='store_false',
+                      dest='default_dimensions', default=True,
+                      help='Do not automatically add dimensions to the task')
     subp.add_argument('target', nargs=1,
                       help='ninja target to build and run')
     subp.add_argument('extra_args', nargs='*',
...@@ -217,26 +219,6 @@ class MetaBuildWrapper(object): ...@@ -217,26 +219,6 @@ class MetaBuildWrapper(object):
help='path to config file (default is %(default)s)') help='path to config file (default is %(default)s)')
subp.set_defaults(func=self.CmdValidate) subp.set_defaults(func=self.CmdValidate)
subp = subps.add_parser('audit',
help='Audit the config file to track progress')
subp.add_argument('-f', '--config-file', metavar='PATH',
default=self.default_config,
help='path to config file (default is %(default)s)')
subp.add_argument('-i', '--internal', action='store_true',
help='check internal masters also')
subp.add_argument('-m', '--master', action='append',
help='master to audit (default is all non-internal '
'masters in file)')
subp.add_argument('-u', '--url-template', action='store',
default='https://build.chromium.org/p/'
'{master}/json/builders',
help='URL scheme for JSON APIs to buildbot '
'(default: %(default)s) ')
subp.add_argument('-c', '--check-compile', action='store_true',
help='check whether tbd and master-only bots actually'
' do compiles')
subp.set_defaults(func=self.CmdAudit)
subp = subps.add_parser('gerrit-buildbucket-config', subp = subps.add_parser('gerrit-buildbucket-config',
help='Print buildbucket.config for gerrit ' help='Print buildbucket.config for gerrit '
'(see MB user guide)') '(see MB user guide)')
@@ -276,11 +258,7 @@ class MetaBuildWrapper(object):
   def CmdAnalyze(self):
     vals = self.Lookup()
-    self.ClobberIfNeeded(vals)
-    if vals['type'] == 'gn':
-      return self.RunGNAnalyze(vals)
-    else:
-      return self.RunGYPAnalyze(vals)
+    return self.RunGNAnalyze(vals)

   def CmdExport(self):
     self.ReadConfigFile()
@@ -312,11 +290,7 @@
   def CmdGen(self):
     vals = self.Lookup()
-    self.ClobberIfNeeded(vals)
-    if vals['type'] == 'gn':
-      return self.RunGNGen(vals)
-    else:
-      return self.RunGYPGen(vals)
+    return self.RunGNGen(vals)

   def CmdHelp(self):
     if self.args.subcommand:
@@ -328,21 +302,14 @@
     vals = self.GetConfig()
     if not vals:
       return 1
-    if vals['type'] == 'gn':
-      return self.RunGNIsolate()
-    else:
-      return self.Build('%s_run' % self.args.target[0])
+    return self.RunGNIsolate()

   def CmdLookup(self):
     vals = self.Lookup()
-    if vals['type'] == 'gn':
-      cmd = self.GNCmd('gen', '_path_')
-      gn_args = self.GNArgs(vals)
-      self.Print('\nWriting """\\\n%s""" to _path_/args.gn.\n' % gn_args)
-      env = None
-    else:
-      cmd, env = self.GYPCmd('_path_', vals)
+    cmd = self.GNCmd('gen', '_path_')
+    gn_args = self.GNArgs(vals)
+    self.Print('\nWriting """\\\n%s""" to _path_/args.gn.\n' % gn_args)
+    env = None
     self.PrintCmd(cmd, env)
     return 0
@@ -355,7 +322,6 @@ class MetaBuildWrapper(object):
     build_dir = self.args.path[0]
     target = self.args.target[0]
-    if vals['type'] == 'gn':
     if self.args.build:
       ret = self.Build(target)
       if ret:
@@ -363,11 +329,50 @@
     ret = self.RunGNIsolate()
     if ret:
       return ret
+    if self.args.swarmed:
+      return self._RunUnderSwarming(build_dir, target)
     else:
-      ret = self.Build('%s_run' % target)
+      return self._RunLocallyIsolated(build_dir, target)
+
+  def _RunUnderSwarming(self, build_dir, target):
+    # TODO(dpranke): Look up the information for the target in
+    # the //testing/buildbot.json file, if possible, so that we
+    # can determine the isolate target, command line, and additional
+    # swarming parameters, if possible.
+    #
+    # TODO(dpranke): Also, add support for sharding and merging results.
+    dimensions = []
+    for k, v in self._DefaultDimensions() + self.args.dimensions:
+      dimensions += ['-d', k, v]
+
+    cmd = [
+        self.executable,
+        self.PathJoin('tools', 'swarming_client', 'isolate.py'),
+        'archive',
+        '-s',
+        self.ToSrcRelPath('%s/%s.isolated' % (build_dir, target)),
+        '-I', 'isolateserver.appspot.com',
+      ]
+    ret, out, _ = self.Run(cmd, force_verbose=False)
     if ret:
       return ret
+
+    isolated_hash = out.splitlines()[0].split()[0]
+    cmd = [
+        self.executable,
+        self.PathJoin('tools', 'swarming_client', 'swarming.py'),
+        'run',
+        '-s', isolated_hash,
+        '-I', 'isolateserver.appspot.com',
+        '-S', 'chromium-swarm.appspot.com',
+      ] + dimensions
+    if self.args.extra_args:
+      cmd += ['--'] + self.args.extra_args
+    ret, _, _ = self.Run(cmd, force_verbose=True, buffer_output=False)
+    return ret
+
+  def _RunLocallyIsolated(self, build_dir, target):
     cmd = [
         self.executable,
         self.PathJoin('tools', 'swarming_client', 'isolate.py'),
@@ -377,10 +382,26 @@
     ]
     if self.args.extra_args:
       cmd += ['--'] + self.args.extra_args
-    ret, _, _ = self.Run(cmd, force_verbose=False, buffer_output=False)
-    return ret
+    ret, _, _ = self.Run(cmd, force_verbose=True, buffer_output=False)
+    return ret
+
+  def _DefaultDimensions(self):
+    if not self.args.default_dimensions:
+      return []
+
+    # This code is naive and just picks reasonable defaults per platform.
+    if self.platform == 'darwin':
+      os_dim = ('os', 'Mac-10.12')
+    elif self.platform.startswith('linux'):
+      os_dim = ('os', 'Ubuntu-14.04')
+    elif self.platform == 'win32':
+      os_dim = ('os', 'Windows-10-14393')
+    else:
+      raise MBErr('unrecognized platform string "%s"' % self.platform)
+
+    return [('pool', 'Chrome'),
+            ('cpu', 'x86-64'),
+            os_dim]

   def CmdBuildbucket(self):
     self.ReadConfigFile()
...@@ -462,138 +483,17 @@ class MetaBuildWrapper(object): ...@@ -462,138 +483,17 @@ class MetaBuildWrapper(object):
self.Print('mb config file %s looks ok.' % self.args.config_file) self.Print('mb config file %s looks ok.' % self.args.config_file)
return 0 return 0
def CmdAudit(self):
"""Track the progress of the GYP->GN migration on the bots."""
# First, make sure the config file is okay, but don't print anything
# if it is (it will throw an error if it isn't).
self.CmdValidate(print_ok=False)
stats = OrderedDict()
STAT_MASTER_ONLY = 'Master only'
STAT_CONFIG_ONLY = 'Config only'
STAT_TBD = 'Still TBD'
STAT_GYP = 'Still GYP'
STAT_DONE = 'Done (on GN)'
stats[STAT_MASTER_ONLY] = 0
stats[STAT_CONFIG_ONLY] = 0
stats[STAT_TBD] = 0
stats[STAT_GYP] = 0
stats[STAT_DONE] = 0
def PrintBuilders(heading, builders, notes):
stats.setdefault(heading, 0)
stats[heading] += len(builders)
if builders:
self.Print(' %s:' % heading)
for builder in sorted(builders):
self.Print(' %s%s' % (builder, notes[builder]))
self.ReadConfigFile()
masters = self.args.master or self.masters
for master in sorted(masters):
url = self.args.url_template.replace('{master}', master)
self.Print('Auditing %s' % master)
MASTERS_TO_SKIP = (
'client.skia',
'client.v8.fyi',
'tryserver.v8',
)
if master in MASTERS_TO_SKIP:
# Skip these bots because converting them is the responsibility of
# those teams and out of scope for the Chromium migration to GN.
self.Print(' Skipped (out of scope)')
self.Print('')
continue
INTERNAL_MASTERS = ('official.desktop', 'official.desktop.continuous',
'internal.client.kitchensync')
if master in INTERNAL_MASTERS and not self.args.internal:
# Skip these because the servers aren't accessible by default ...
self.Print(' Skipped (internal)')
self.Print('')
continue
try:
# Fetch the /builders contents from the buildbot master. The
# keys of the dict are the builder names themselves.
json_contents = self.Fetch(url)
d = json.loads(json_contents)
except Exception as e:
self.Print(str(e))
return 1
config_builders = set(self.masters[master])
master_builders = set(d.keys())
both = master_builders & config_builders
master_only = master_builders - config_builders
config_only = config_builders - master_builders
tbd = set()
gyp = set()
done = set()
notes = {builder: '' for builder in config_builders | master_builders}
for builder in both:
config = self.masters[master][builder]
if config == 'tbd':
tbd.add(builder)
elif isinstance(config, dict):
vals = self.FlattenConfig(config.values()[0])
if vals['type'] == 'gyp':
gyp.add(builder)
else:
done.add(builder)
elif config.startswith('//'):
done.add(builder)
else:
vals = self.FlattenConfig(config)
if vals['type'] == 'gyp':
gyp.add(builder)
else:
done.add(builder)
if self.args.check_compile and (tbd or master_only):
either = tbd | master_only
for builder in either:
notes[builder] = ' (' + self.CheckCompile(master, builder) +')'
if master_only or config_only or tbd or gyp:
PrintBuilders(STAT_MASTER_ONLY, master_only, notes)
PrintBuilders(STAT_CONFIG_ONLY, config_only, notes)
PrintBuilders(STAT_TBD, tbd, notes)
PrintBuilders(STAT_GYP, gyp, notes)
else:
self.Print(' All GN!')
stats[STAT_DONE] += len(done)
self.Print('')
fmt = '{:<27} {:>4}'
self.Print(fmt.format('Totals', str(sum(int(v) for v in stats.values()))))
self.Print(fmt.format('-' * 27, '----'))
for stat, count in stats.items():
self.Print(fmt.format(stat, str(count)))
return 0
def GetConfig(self): def GetConfig(self):
build_dir = self.args.path[0] build_dir = self.args.path[0]
vals = self.DefaultVals() vals = self.DefaultVals()
if self.args.builder or self.args.master or self.args.config: if self.args.builder or self.args.master or self.args.config:
vals = self.Lookup() vals = self.Lookup()
if vals['type'] == 'gn':
# Re-run gn gen in order to ensure the config is consistent with the # Re-run gn gen in order to ensure the config is consistent with the
# build dir. # build dir.
self.RunGNGen(vals) self.RunGNGen(vals)
return vals return vals
mb_type_path = self.PathJoin(self.ToAbsPath(build_dir), 'mb_type')
if not self.Exists(mb_type_path):
toolchain_path = self.PathJoin(self.ToAbsPath(build_dir), toolchain_path = self.PathJoin(self.ToAbsPath(build_dir),
'toolchain.ninja') 'toolchain.ninja')
if not self.Exists(toolchain_path): if not self.Exists(toolchain_path):
...@@ -601,15 +501,8 @@ class MetaBuildWrapper(object): ...@@ -601,15 +501,8 @@ class MetaBuildWrapper(object):
'or pass in a -m/-b pair or a -c flag to specify the ' 'or pass in a -m/-b pair or a -c flag to specify the '
'configuration') 'configuration')
return {} return {}
else:
mb_type = 'gn'
else:
mb_type = self.ReadFile(mb_type_path).strip()
if mb_type == 'gn':
vals['gn_args'] = self.GNArgsFromDir(build_dir) vals['gn_args'] = self.GNArgsFromDir(build_dir)
vals['type'] = mb_type
return vals return vals
def GNArgsFromDir(self, build_dir): def GNArgsFromDir(self, build_dir):
...@@ -641,14 +534,6 @@ class MetaBuildWrapper(object): ...@@ -641,14 +534,6 @@ class MetaBuildWrapper(object):
raise MBErr('Config "%s" not found in %s' % raise MBErr('Config "%s" not found in %s' %
(config, self.args.config_file)) (config, self.args.config_file))
vals = self.FlattenConfig(config) vals = self.FlattenConfig(config)
# Do some basic sanity checking on the config so that we
# don't have to do this in every caller.
if 'type' not in vals:
vals['type'] = 'gn'
assert vals['type'] in ('gn', 'gyp'), (
'Unknown meta-build type "%s"' % vals['gn_args'])
return vals return vals
def ReadIOSBotConfig(self): def ReadIOSBotConfig(self):
...@@ -660,17 +545,10 @@ class MetaBuildWrapper(object): ...@@ -660,17 +545,10 @@ class MetaBuildWrapper(object):
return {} return {}
contents = json.loads(self.ReadFile(path)) contents = json.loads(self.ReadFile(path))
gyp_vals = contents.get('GYP_DEFINES', {})
if isinstance(gyp_vals, dict):
gyp_defines = ' '.join('%s=%s' % (k, v) for k, v in gyp_vals.items())
else:
gyp_defines = ' '.join(gyp_vals)
gn_args = ' '.join(contents.get('gn_args', [])) gn_args = ' '.join(contents.get('gn_args', []))
vals = self.DefaultVals() vals = self.DefaultVals()
vals['gn_args'] = gn_args vals['gn_args'] = gn_args
vals['gyp_defines'] = gyp_defines
vals['type'] = contents.get('mb_type', 'gn')
return vals return vals
def ReadConfigFile(self): def ReadConfigFile(self):
@@ -689,14 +567,26 @@
     self.mixins = contents['mixins']

   def ReadIsolateMap(self):
-    if not self.Exists(self.args.isolate_map_file):
-      raise MBErr('isolate map file not found at %s' %
-                  self.args.isolate_map_file)
+    if not self.args.isolate_map_files:
+      self.args.isolate_map_files = [self.default_isolate_map]
+    for f in self.args.isolate_map_files:
+      if not self.Exists(f):
+        raise MBErr('isolate map file not found at %s' % f)
+
+    isolate_maps = {}
+    for isolate_map in self.args.isolate_map_files:
       try:
-        return ast.literal_eval(self.ReadFile(self.args.isolate_map_file))
+        isolate_map = ast.literal_eval(self.ReadFile(isolate_map))
+        duplicates = set(isolate_map).intersection(isolate_maps)
+        if duplicates:
+          raise MBErr(
+              'Duplicate targets in isolate map files: %s.' %
+              ', '.join(duplicates))
+        isolate_maps.update(isolate_map)
       except SyntaxError as e:
-        raise MBErr('Failed to parse isolate map file "%s": %s' %
-                    (self.args.isolate_map_file, e))
+        raise MBErr(
+            'Failed to parse isolate map file "%s": %s' % (isolate_map, e))
+    return isolate_maps

   def ConfigFromArgs(self):
     if self.args.config:
...@@ -747,9 +637,6 @@ class MetaBuildWrapper(object): ...@@ -747,9 +637,6 @@ class MetaBuildWrapper(object):
'args_file': '', 'args_file': '',
'cros_passthrough': False, 'cros_passthrough': False,
'gn_args': '', 'gn_args': '',
'gyp_defines': '',
'gyp_crosscompile': False,
'type': 'gn',
} }
def FlattenMixins(self, mixins, vals, visited): def FlattenMixins(self, mixins, vals, visited):
...@@ -773,50 +660,11 @@ class MetaBuildWrapper(object): ...@@ -773,50 +660,11 @@ class MetaBuildWrapper(object):
vals['gn_args'] += ' ' + mixin_vals['gn_args'] vals['gn_args'] += ' ' + mixin_vals['gn_args']
else: else:
vals['gn_args'] = mixin_vals['gn_args'] vals['gn_args'] = mixin_vals['gn_args']
if 'gyp_crosscompile' in mixin_vals:
vals['gyp_crosscompile'] = mixin_vals['gyp_crosscompile']
if 'gyp_defines' in mixin_vals:
if vals['gyp_defines']:
vals['gyp_defines'] += ' ' + mixin_vals['gyp_defines']
else:
vals['gyp_defines'] = mixin_vals['gyp_defines']
if 'type' in mixin_vals:
vals['type'] = mixin_vals['type']
if 'mixins' in mixin_vals: if 'mixins' in mixin_vals:
self.FlattenMixins(mixin_vals['mixins'], vals, visited) self.FlattenMixins(mixin_vals['mixins'], vals, visited)
return vals return vals
def ClobberIfNeeded(self, vals):
path = self.args.path[0]
build_dir = self.ToAbsPath(path)
mb_type_path = self.PathJoin(build_dir, 'mb_type')
needs_clobber = False
new_mb_type = vals['type']
if self.Exists(build_dir):
if self.Exists(mb_type_path):
old_mb_type = self.ReadFile(mb_type_path)
if old_mb_type != new_mb_type:
self.Print("Build type mismatch: was %s, will be %s, clobbering %s" %
(old_mb_type, new_mb_type, path))
needs_clobber = True
else:
# There is no 'mb_type' file in the build directory, so this probably
# means that the prior build(s) were not done through mb, and we
# have no idea if this was a GYP build or a GN build. Clobber it
# to be safe.
self.Print("%s/mb_type missing, clobbering to be safe" % path)
needs_clobber = True
if self.args.dryrun:
return
if needs_clobber:
self.RemoveDirectory(build_dir)
self.MaybeMakeDirectory(build_dir)
self.WriteFile(mb_type_path, new_mb_type)
def RunGNGen(self, vals, compute_grit_inputs_for_analyze=False): def RunGNGen(self, vals, compute_grit_inputs_for_analyze=False):
build_dir = self.args.path[0] build_dir = self.args.path[0]
...@@ -861,6 +709,7 @@ class MetaBuildWrapper(object): ...@@ -861,6 +709,7 @@ class MetaBuildWrapper(object):
return ret return ret
android = 'target_os="android"' in vals['gn_args'] android = 'target_os="android"' in vals['gn_args']
fuchsia = 'target_os="fuchsia"' in vals['gn_args']
for target in swarming_targets: for target in swarming_targets:
if android: if android:
# Android targets may be either android_apk or executable. The former # Android targets may be either android_apk or executable. The former
...@@ -870,6 +719,11 @@ class MetaBuildWrapper(object): ...@@ -870,6 +719,11 @@ class MetaBuildWrapper(object):
runtime_deps_targets = [ runtime_deps_targets = [
target + '.runtime_deps', target + '.runtime_deps',
'obj/%s.stamp.runtime_deps' % label.replace(':', '/')] 'obj/%s.stamp.runtime_deps' % label.replace(':', '/')]
elif fuchsia:
# Only emit a runtime deps file for the group() target on Fuchsia.
label = isolate_map[target]['label']
runtime_deps_targets = [
'obj/%s.stamp.runtime_deps' % label.replace(':', '/')]
elif (isolate_map[target]['type'] == 'script' or elif (isolate_map[target]['type'] == 'script' or
isolate_map[target].get('label_type') == 'group'): isolate_map[target].get('label_type') == 'group'):
# For script targets, the build target is usually a group, # For script targets, the build target is usually a group,
...@@ -1023,38 +877,6 @@ class MetaBuildWrapper(object): ...@@ -1023,38 +877,6 @@ class MetaBuildWrapper(object):
gn_args = ('import("%s")\n' % vals['args_file']) + gn_args gn_args = ('import("%s")\n' % vals['args_file']) + gn_args
return gn_args return gn_args
def RunGYPGen(self, vals):
path = self.args.path[0]
output_dir = self.ParseGYPConfigPath(path)
cmd, env = self.GYPCmd(output_dir, vals)
ret, _, _ = self.Run(cmd, env=env)
return ret
def RunGYPAnalyze(self, vals):
output_dir = self.ParseGYPConfigPath(self.args.path[0])
if self.args.verbose:
inp = self.ReadInputJSON(['files', 'test_targets',
'additional_compile_targets'])
self.Print()
self.Print('analyze input:')
self.PrintJSON(inp)
self.Print()
cmd, env = self.GYPCmd(output_dir, vals)
cmd.extend(['-f', 'analyzer',
'-G', 'config_path=%s' % self.args.input_path[0],
'-G', 'analyzer_output_path=%s' % self.args.output_path[0]])
ret, _, _ = self.Run(cmd, env=env)
if not ret and self.args.verbose:
outp = json.loads(self.ReadFile(self.args.output_path[0]))
self.Print()
self.Print('analyze output:')
self.PrintJSON(outp)
self.Print()
return ret
def ToAbsPath(self, build_path, *comps): def ToAbsPath(self, build_path, *comps):
return self.PathJoin(self.chromium_src_dir, return self.PathJoin(self.chromium_src_dir,
self.ToSrcRelPath(build_path), self.ToSrcRelPath(build_path),
...@@ -1066,86 +888,6 @@ class MetaBuildWrapper(object): ...@@ -1066,86 +888,6 @@ class MetaBuildWrapper(object):
return path[2:].replace('/', self.sep) return path[2:].replace('/', self.sep)
return self.RelPath(path, self.chromium_src_dir) return self.RelPath(path, self.chromium_src_dir)
def ParseGYPConfigPath(self, path):
rpath = self.ToSrcRelPath(path)
output_dir, _, _ = rpath.rpartition(self.sep)
return output_dir
def GYPCmd(self, output_dir, vals):
if vals['cros_passthrough']:
if not 'GYP_DEFINES' in os.environ:
raise MBErr('MB is expecting GYP_DEFINES to be in the environment')
gyp_defines = os.environ['GYP_DEFINES']
if not 'chromeos=1' in gyp_defines:
raise MBErr('GYP_DEFINES is missing chromeos=1: (GYP_DEFINES=%s)' %
gyp_defines)
else:
gyp_defines = vals['gyp_defines']
goma_dir = self.args.goma_dir
# GYP uses shlex.split() to split the gyp defines into separate arguments,
# so we can support backslashes and and spaces in arguments by quoting
# them, even on Windows, where this normally wouldn't work.
if goma_dir and ('\\' in goma_dir or ' ' in goma_dir):
goma_dir = "'%s'" % goma_dir
if goma_dir:
gyp_defines += ' gomadir=%s' % goma_dir
android_version_code = self.args.android_version_code
if android_version_code:
gyp_defines += ' app_manifest_version_code=%s' % android_version_code
android_version_name = self.args.android_version_name
if android_version_name:
gyp_defines += ' app_manifest_version_name=%s' % android_version_name
cmd = [
self.executable,
self.args.gyp_script,
'-G',
'output_dir=' + output_dir,
]
# Ensure that we have an environment that only contains
# the exact values of the GYP variables we need.
env = os.environ.copy()
# This is a terrible hack to work around the fact that
# //tools/clang/scripts/update.py is invoked by GYP and GN but
# currently relies on an environment variable to figure out
# what revision to embed in the command line #defines.
# For GN, we've made this work via a gn arg that will cause update.py
# to get an additional command line arg, but getting that to work
# via GYP_DEFINES has proven difficult, so we rewrite the GYP_DEFINES
# to get rid of the arg and add the old var in, instead.
# See crbug.com/582737 for more on this. This can hopefully all
# go away with GYP.
m = re.search('llvm_force_head_revision=1\s*', gyp_defines)
if m:
env['LLVM_FORCE_HEAD_REVISION'] = '1'
gyp_defines = gyp_defines.replace(m.group(0), '')
# This is another terrible hack to work around the fact that
# GYP sets the link concurrency to use via the GYP_LINK_CONCURRENCY
# environment variable, and not via a proper GYP_DEFINE. See
# crbug.com/611491 for more on this.
m = re.search('gyp_link_concurrency=(\d+)(\s*)', gyp_defines)
if m:
env['GYP_LINK_CONCURRENCY'] = m.group(1)
gyp_defines = gyp_defines.replace(m.group(0), '')
env['GYP_GENERATORS'] = 'ninja'
if 'GYP_CHROMIUM_NO_ACTION' in env:
del env['GYP_CHROMIUM_NO_ACTION']
if 'GYP_CROSSCOMPILE' in env:
del env['GYP_CROSSCOMPILE']
env['GYP_DEFINES'] = gyp_defines
if vals['gyp_crosscompile']:
env['GYP_CROSSCOMPILE'] = '1'
return cmd, env
def RunGNAnalyze(self, vals): def RunGNAnalyze(self, vals):
# Analyze runs before 'gn gen' now, so we need to run gn gen # Analyze runs before 'gn gen' now, so we need to run gn gen
# in order to ensure that we have a build directory. # in order to ensure that we have a build directory.
...@@ -1347,9 +1089,6 @@ class MetaBuildWrapper(object): ...@@ -1347,9 +1089,6 @@ class MetaBuildWrapper(object):
if env and var in env: if env and var in env:
self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var]))) self.Print('%s%s=%s' % (env_prefix, var, env_quoter(env[var])))
print_env('GYP_CROSSCOMPILE')
print_env('GYP_DEFINES')
print_env('GYP_LINK_CONCURRENCY')
print_env('LLVM_FORCE_HEAD_REVISION') print_env('LLVM_FORCE_HEAD_REVISION')
if cmd[0] == self.executable: if cmd[0] == self.executable:
...@@ -1486,7 +1225,6 @@ def QuoteForSet(arg): ...@@ -1486,7 +1225,6 @@ def QuoteForSet(arg):
def QuoteForCmd(arg): def QuoteForCmd(arg):
# First, escape the arg so that CommandLineToArgvW will parse it properly. # First, escape the arg so that CommandLineToArgvW will parse it properly.
# From //tools/gyp/pylib/gyp/msvs_emulation.py:23.
if arg == '' or ' ' in arg or '"' in arg: if arg == '' or ' ' in arg or '"' in arg:
quote_re = re.compile(r'(\\*)"') quote_re = re.compile(r'(\\*)"')
arg = '"%s"' % (quote_re.sub(lambda mo: 2 * mo.group(1) + '\\"', arg)) arg = '"%s"' % (quote_re.sub(lambda mo: 2 * mo.group(1) + '\\"', arg))
......
...@@ -65,8 +65,6 @@ class FakeMBW(mb.MetaBuildWrapper): ...@@ -65,8 +65,6 @@ class FakeMBW(mb.MetaBuildWrapper):
self.files[path] = contents self.files[path] = contents
def Call(self, cmd, env=None, buffer_output=True): def Call(self, cmd, env=None, buffer_output=True):
if env:
self.cross_compile = env.get('GYP_CROSSCOMPILE')
self.calls.append(cmd) self.calls.append(cmd)
if self.cmds: if self.cmds:
return self.cmds.pop(0) return self.cmds.pop(0)
...@@ -112,13 +110,10 @@ TEST_CONFIG = """\ ...@@ -112,13 +110,10 @@ TEST_CONFIG = """\
'masters': { 'masters': {
'chromium': {}, 'chromium': {},
'fake_master': { 'fake_master': {
'fake_builder': 'gyp_rel_bot', 'fake_builder': 'rel_bot',
'fake_gn_builder': 'gn_rel_bot', 'fake_debug_builder': 'debug_goma',
'fake_gyp_crosscompile_builder': 'gyp_crosscompile', 'fake_args_bot': '//build/args/bots/fake_master/fake_args_bot.gn',
'fake_gn_debug_builder': 'gn_debug_goma', 'fake_multi_phase': { 'phase_1': 'phase_1', 'phase_2': 'phase_2'},
'fake_gyp_builder': 'gyp_debug',
'fake_gn_args_bot': '//build/args/bots/fake_master/fake_gn_args_bot.gn',
'fake_multi_phase': { 'phase_1': 'gn_phase_1', 'phase_2': 'gn_phase_2'},
'fake_args_file': 'args_file_goma', 'fake_args_file': 'args_file_goma',
'fake_args_file_twice': 'args_file_twice', 'fake_args_file_twice': 'args_file_twice',
}, },
...@@ -126,38 +121,26 @@ TEST_CONFIG = """\ ...@@ -126,38 +121,26 @@ TEST_CONFIG = """\
'configs': { 'configs': {
'args_file_goma': ['args_file', 'goma'], 'args_file_goma': ['args_file', 'goma'],
'args_file_twice': ['args_file', 'args_file'], 'args_file_twice': ['args_file', 'args_file'],
'gyp_rel_bot': ['gyp', 'rel', 'goma'], 'rel_bot': ['rel', 'goma', 'fake_feature1'],
'gn_debug_goma': ['gn', 'debug', 'goma'], 'debug_goma': ['debug', 'goma'],
'gyp_debug': ['gyp', 'debug', 'fake_feature1'], 'phase_1': ['phase_1'],
'gn_rel_bot': ['gn', 'rel', 'goma'], 'phase_2': ['phase_2'],
'gyp_crosscompile': ['gyp', 'crosscompile'],
'gn_phase_1': ['gn', 'phase_1'],
'gn_phase_2': ['gn', 'phase_2'],
}, },
'mixins': { 'mixins': {
'crosscompile': {
'gyp_crosscompile': True,
},
'fake_feature1': { 'fake_feature1': {
'gn_args': 'enable_doom_melon=true', 'gn_args': 'enable_doom_melon=true',
'gyp_defines': 'doom_melon=1',
}, },
'gyp': {'type': 'gyp'},
'gn': {'type': 'gn'},
'goma': { 'goma': {
'gn_args': 'use_goma=true', 'gn_args': 'use_goma=true',
'gyp_defines': 'goma=1',
}, },
'args_file': { 'args_file': {
'args_file': '//build/args/fake.gn', 'args_file': '//build/args/fake.gn',
}, },
'phase_1': { 'phase_1': {
'gn_args': 'phase=1', 'gn_args': 'phase=1',
'gyp_args': 'phase=1',
}, },
'phase_2': { 'phase_2': {
'gn_args': 'phase=2', 'gn_args': 'phase=2',
'gyp_args': 'phase=2',
}, },
'rel': { 'rel': {
'gn_args': 'is_debug=false', 'gn_args': 'is_debug=false',
...@@ -169,28 +152,6 @@ TEST_CONFIG = """\ ...@@ -169,28 +152,6 @@ TEST_CONFIG = """\
} }
""" """
GYP_HACKS_CONFIG = """\
{
'masters': {
'chromium': {},
'fake_master': {
'fake_builder': 'fake_config',
},
},
'configs': {
'fake_config': ['fake_mixin'],
},
'mixins': {
'fake_mixin': {
'type': 'gyp',
'gn_args': '',
'gyp_defines':
('foo=bar llvm_force_head_revision=1 '
'gyp_link_concurrency=1 baz=1'),
},
},
}
"""
TRYSERVER_CONFIG = """\ TRYSERVER_CONFIG = """\
{ {
...@@ -229,7 +190,7 @@ class UnitTest(unittest.TestCase): ...@@ -229,7 +190,7 @@ class UnitTest(unittest.TestCase):
}, },
}''') }''')
mbw.files.setdefault( mbw.files.setdefault(
mbw.ToAbsPath('//build/args/bots/fake_master/fake_gn_args_bot.gn'), mbw.ToAbsPath('//build/args/bots/fake_master/fake_args_bot.gn'),
'is_debug = false\n') 'is_debug = false\n')
if files: if files:
for path, contents in files.items(): for path, contents in files.items():
...@@ -249,37 +210,6 @@ class UnitTest(unittest.TestCase): ...@@ -249,37 +210,6 @@ class UnitTest(unittest.TestCase):
self.assertEqual(mbw.err, err) self.assertEqual(mbw.err, err)
return mbw return mbw
def test_clobber(self):
files = {
'/fake_src/out/Debug': None,
'/fake_src/out/Debug/mb_type': None,
}
mbw = self.fake_mbw(files)
# The first time we run this, the build dir doesn't exist, so no clobber.
self.check(['gen', '-c', 'gn_debug_goma', '//out/Debug'], mbw=mbw, ret=0)
self.assertEqual(mbw.rmdirs, [])
self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gn')
# The second time we run this, the build dir exists and matches, so no
# clobber.
self.check(['gen', '-c', 'gn_debug_goma', '//out/Debug'], mbw=mbw, ret=0)
self.assertEqual(mbw.rmdirs, [])
self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gn')
# Now we switch build types; this should result in a clobber.
self.check(['gen', '-c', 'gyp_debug', '//out/Debug'], mbw=mbw, ret=0)
self.assertEqual(mbw.rmdirs, ['/fake_src/out/Debug'])
self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gyp')
# Now we delete mb_type; this checks the case where the build dir
# exists but wasn't populated by mb; this should also result in a clobber.
del mbw.files['/fake_src/out/Debug/mb_type']
self.check(['gen', '-c', 'gyp_debug', '//out/Debug'], mbw=mbw, ret=0)
self.assertEqual(mbw.rmdirs,
['/fake_src/out/Debug', '/fake_src/out/Debug'])
self.assertEqual(mbw.files['/fake_src/out/Debug/mb_type'], 'gyp')
def test_analyze(self): def test_analyze(self):
files = {'/tmp/in.json': '''{\ files = {'/tmp/in.json': '''{\
"files": ["foo/foo_unittest.cc"], "files": ["foo/foo_unittest.cc"],
...@@ -295,7 +225,7 @@ class UnitTest(unittest.TestCase): ...@@ -295,7 +225,7 @@ class UnitTest(unittest.TestCase):
mbw = self.fake_mbw(files) mbw = self.fake_mbw(files)
mbw.Call = lambda cmd, env=None, buffer_output=True: (0, '', '') mbw.Call = lambda cmd, env=None, buffer_output=True: (0, '', '')
self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default', self.check(['analyze', '-c', 'debug_goma', '//out/Default',
'/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0) '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0)
out = json.loads(mbw.files['/tmp/out.json']) out = json.loads(mbw.files['/tmp/out.json'])
self.assertEqual(out, { self.assertEqual(out, {
...@@ -319,7 +249,7 @@ class UnitTest(unittest.TestCase): ...@@ -319,7 +249,7 @@ class UnitTest(unittest.TestCase):
mbw = self.fake_mbw(files) mbw = self.fake_mbw(files)
mbw.Call = lambda cmd, env=None, buffer_output=True: (0, '', '') mbw.Call = lambda cmd, env=None, buffer_output=True: (0, '', '')
self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default', self.check(['analyze', '-c', 'debug_goma', '//out/Default',
'/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0) '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0)
out = json.loads(mbw.files['/tmp/out.json']) out = json.loads(mbw.files['/tmp/out.json'])
...@@ -342,7 +272,7 @@ class UnitTest(unittest.TestCase): ...@@ -342,7 +272,7 @@ class UnitTest(unittest.TestCase):
mbw = self.fake_mbw(files) mbw = self.fake_mbw(files)
mbw.Call = lambda cmd, env=None, buffer_output=True: (0, '', '') mbw.Call = lambda cmd, env=None, buffer_output=True: (0, '', '')
self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default', self.check(['analyze', '-c', 'debug_goma', '//out/Default',
'/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0) '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0)
out = json.loads(mbw.files['/tmp/out.json']) out = json.loads(mbw.files['/tmp/out.json'])
...@@ -369,7 +299,7 @@ class UnitTest(unittest.TestCase): ...@@ -369,7 +299,7 @@ class UnitTest(unittest.TestCase):
mbw = self.fake_mbw(files) mbw = self.fake_mbw(files)
mbw.Call = lambda cmd, env=None, buffer_output=True: (0, '', '') mbw.Call = lambda cmd, env=None, buffer_output=True: (0, '', '')
self.check(['analyze', '-c', 'gn_debug_goma', '//out/Default', self.check(['analyze', '-c', 'debug_goma', '//out/Default',
'/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0) '/tmp/in.json', '/tmp/out.json'], mbw=mbw, ret=0)
out = json.loads(mbw.files['/tmp/out.json']) out = json.loads(mbw.files['/tmp/out.json'])
...@@ -379,9 +309,9 @@ class UnitTest(unittest.TestCase): ...@@ -379,9 +309,9 @@ class UnitTest(unittest.TestCase):
# test_targets and additional_compile_targets. # test_targets and additional_compile_targets.
self.assertEqual(['all', 'foo_unittests'], out['compile_targets']) self.assertEqual(['all', 'foo_unittests'], out['compile_targets'])
def test_gn_gen(self): def test_gen(self):
mbw = self.fake_mbw() mbw = self.fake_mbw()
self.check(['gen', '-c', 'gn_debug_goma', '//out/Default', '-g', '/goma'], self.check(['gen', '-c', 'debug_goma', '//out/Default', '-g', '/goma'],
mbw=mbw, ret=0) mbw=mbw, ret=0)
self.assertMultiLineEqual(mbw.files['/fake_src/out/Default/args.gn'], self.assertMultiLineEqual(mbw.files['/fake_src/out/Default/args.gn'],
('goma_dir = "/goma"\n' ('goma_dir = "/goma"\n'
...@@ -394,7 +324,7 @@ class UnitTest(unittest.TestCase): ...@@ -394,7 +324,7 @@ class UnitTest(unittest.TestCase):
mbw.out) mbw.out)
mbw = self.fake_mbw(win32=True) mbw = self.fake_mbw(win32=True)
self.check(['gen', '-c', 'gn_debug_goma', '-g', 'c:\\goma', '//out/Debug'], self.check(['gen', '-c', 'debug_goma', '-g', 'c:\\goma', '//out/Debug'],
mbw=mbw, ret=0) mbw=mbw, ret=0)
self.assertMultiLineEqual(mbw.files['c:\\fake_src\\out\\Debug\\args.gn'], self.assertMultiLineEqual(mbw.files['c:\\fake_src\\out\\Debug\\args.gn'],
('goma_dir = "c:\\\\goma"\n' ('goma_dir = "c:\\\\goma"\n'
...@@ -404,14 +334,14 @@ class UnitTest(unittest.TestCase): ...@@ -404,14 +334,14 @@ class UnitTest(unittest.TestCase):
'--check\n', mbw.out) '--check\n', mbw.out)
mbw = self.fake_mbw() mbw = self.fake_mbw()
self.check(['gen', '-m', 'fake_master', '-b', 'fake_gn_args_bot', self.check(['gen', '-m', 'fake_master', '-b', 'fake_args_bot',
'//out/Debug'], '//out/Debug'],
mbw=mbw, ret=0) mbw=mbw, ret=0)
self.assertEqual( self.assertEqual(
mbw.files['/fake_src/out/Debug/args.gn'], mbw.files['/fake_src/out/Debug/args.gn'],
'import("//build/args/bots/fake_master/fake_gn_args_bot.gn")\n') 'import("//build/args/bots/fake_master/fake_args_bot.gn")\n')
def test_gn_gen_args_file_mixins(self): def test_gen_args_file_mixins(self):
mbw = self.fake_mbw() mbw = self.fake_mbw()
self.check(['gen', '-m', 'fake_master', '-b', 'fake_args_file', self.check(['gen', '-m', 'fake_master', '-b', 'fake_args_file',
'//out/Debug'], mbw=mbw, ret=0) '//out/Debug'], mbw=mbw, ret=0)
...@@ -425,14 +355,14 @@ class UnitTest(unittest.TestCase): ...@@ -425,14 +355,14 @@ class UnitTest(unittest.TestCase):
self.check(['gen', '-m', 'fake_master', '-b', 'fake_args_file_twice', self.check(['gen', '-m', 'fake_master', '-b', 'fake_args_file_twice',
'//out/Debug'], mbw=mbw, ret=1) '//out/Debug'], mbw=mbw, ret=1)
def test_gn_gen_fails(self): def test_gen_fails(self):
mbw = self.fake_mbw() mbw = self.fake_mbw()
mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '') mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '')
self.check(['gen', '-c', 'gn_debug_goma', '//out/Default'], mbw=mbw, ret=1) self.check(['gen', '-c', 'debug_goma', '//out/Default'], mbw=mbw, ret=1)
# TODO(machenbach): Comment back in after swarming file parameter is used. # TODO(machenbach): Comment back in after swarming file parameter is used.
""" """
def test_gn_gen_swarming(self): def test_gen_swarming(self):
files = { files = {
'/tmp/swarming_targets': 'base_unittests\n', '/tmp/swarming_targets': 'base_unittests\n',
'/fake_src/testing/buildbot/gn_isolate_map.pyl': ( '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
...@@ -448,7 +378,7 @@ class UnitTest(unittest.TestCase): ...@@ -448,7 +378,7 @@ class UnitTest(unittest.TestCase):
} }
mbw = self.fake_mbw(files) mbw = self.fake_mbw(files)
self.check(['gen', self.check(['gen',
'-c', 'gn_debug_goma', '-c', 'debug_goma',
'--swarming-targets-file', '/tmp/swarming_targets', '--swarming-targets-file', '/tmp/swarming_targets',
'//out/Default'], mbw=mbw, ret=0) '//out/Default'], mbw=mbw, ret=0)
self.assertIn('/fake_src/out/Default/base_unittests.isolate', self.assertIn('/fake_src/out/Default/base_unittests.isolate',
...@@ -456,7 +386,7 @@ class UnitTest(unittest.TestCase): ...@@ -456,7 +386,7 @@ class UnitTest(unittest.TestCase):
self.assertIn('/fake_src/out/Default/base_unittests.isolated.gen.json', self.assertIn('/fake_src/out/Default/base_unittests.isolated.gen.json',
mbw.files) mbw.files)
def test_gn_gen_swarming_script(self): def test_gen_swarming_script(self):
files = { files = {
'/tmp/swarming_targets': 'cc_perftests\n', '/tmp/swarming_targets': 'cc_perftests\n',
'/fake_src/testing/buildbot/gn_isolate_map.pyl': ( '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
...@@ -473,7 +403,7 @@ class UnitTest(unittest.TestCase): ...@@ -473,7 +403,7 @@ class UnitTest(unittest.TestCase):
} }
mbw = self.fake_mbw(files=files, win32=True) mbw = self.fake_mbw(files=files, win32=True)
self.check(['gen', self.check(['gen',
'-c', 'gn_debug_goma', '-c', 'debug_goma',
'--swarming-targets-file', '/tmp/swarming_targets', '--swarming-targets-file', '/tmp/swarming_targets',
'--isolate-map-file', '--isolate-map-file',
'/fake_src/testing/buildbot/gn_isolate_map.pyl', '/fake_src/testing/buildbot/gn_isolate_map.pyl',
...@@ -482,9 +412,77 @@ class UnitTest(unittest.TestCase): ...@@ -482,9 +412,77 @@ class UnitTest(unittest.TestCase):
mbw.files) mbw.files)
self.assertIn('c:\\fake_src\\out\\Default\\cc_perftests.isolated.gen.json', self.assertIn('c:\\fake_src\\out\\Default\\cc_perftests.isolated.gen.json',
mbw.files) mbw.files)
""" # pylint: disable=pointless-string-statement
def test_gn_isolate(self):
def test_multiple_isolate_maps(self):
files = {
'/tmp/swarming_targets': 'cc_perftests\n',
'/fake_src/testing/buildbot/gn_isolate_map.pyl': (
"{'cc_perftests': {"
" 'label': '//cc:cc_perftests',"
" 'type': 'raw',"
" 'args': [],"
"}}\n"
),
'/fake_src/testing/buildbot/gn_isolate_map2.pyl': (
"{'cc_perftests2': {"
" 'label': '//cc:cc_perftests',"
" 'type': 'raw',"
" 'args': [],"
"}}\n"
),
'c:\\fake_src\out\Default\cc_perftests.exe.runtime_deps': (
"cc_perftests\n"
),
}
mbw = self.fake_mbw(files=files, win32=True)
self.check(['gen',
'-c', 'debug_goma',
'--swarming-targets-file', '/tmp/swarming_targets',
'--isolate-map-file',
'/fake_src/testing/buildbot/gn_isolate_map.pyl',
'--isolate-map-file',
'/fake_src/testing/buildbot/gn_isolate_map2.pyl',
'//out/Default'], mbw=mbw, ret=0)
self.assertIn('c:\\fake_src\\out\\Default\\cc_perftests.isolate',
mbw.files)
self.assertIn('c:\\fake_src\\out\\Default\\cc_perftests.isolated.gen.json',
mbw.files)
def test_duplicate_isolate_maps(self):
files = {
'/tmp/swarming_targets': 'cc_perftests\n',
'/fake_src/testing/buildbot/gn_isolate_map.pyl': (
"{'cc_perftests': {"
" 'label': '//cc:cc_perftests',"
" 'type': 'raw',"
" 'args': [],"
"}}\n"
),
'/fake_src/testing/buildbot/gn_isolate_map2.pyl': (
"{'cc_perftests': {"
" 'label': '//cc:cc_perftests',"
" 'type': 'raw',"
" 'args': [],"
"}}\n"
),
'c:\\fake_src\out\Default\cc_perftests.exe.runtime_deps': (
"cc_perftests\n"
),
}
mbw = self.fake_mbw(files=files, win32=True)
# Check that passing duplicate targets into mb fails.
self.check(['gen',
'-c', 'debug_goma',
'--swarming-targets-file', '/tmp/swarming_targets',
'--isolate-map-file',
'/fake_src/testing/buildbot/gn_isolate_map.pyl',
'--isolate-map-file',
'/fake_src/testing/buildbot/gn_isolate_map2.pyl',
'//out/Default'], mbw=mbw, ret=1)
def test_isolate(self):
files = { files = {
'/fake_src/out/Default/toolchain.ninja': "", '/fake_src/out/Default/toolchain.ninja': "",
'/fake_src/testing/buildbot/gn_isolate_map.pyl': ( '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
...@@ -498,7 +496,7 @@ class UnitTest(unittest.TestCase): ...@@ -498,7 +496,7 @@ class UnitTest(unittest.TestCase):
"base_unittests\n" "base_unittests\n"
), ),
} }
self.check(['isolate', '-c', 'gn_debug_goma', '//out/Default', self.check(['isolate', '-c', 'debug_goma', '//out/Default',
'base_unittests'], files=files, ret=0) 'base_unittests'], files=files, ret=0)
# test running isolate on an existing build_dir # test running isolate on an existing build_dir
...@@ -506,11 +504,10 @@ class UnitTest(unittest.TestCase): ...@@ -506,11 +504,10 @@ class UnitTest(unittest.TestCase):
self.check(['isolate', '//out/Default', 'base_unittests'], self.check(['isolate', '//out/Default', 'base_unittests'],
files=files, ret=0) files=files, ret=0)
files['/fake_src/out/Default/mb_type'] = 'gn\n'
self.check(['isolate', '//out/Default', 'base_unittests'], self.check(['isolate', '//out/Default', 'base_unittests'],
files=files, ret=0) files=files, ret=0)
def test_gn_run(self): def test_run(self):
files = { files = {
'/fake_src/testing/buildbot/gn_isolate_map.pyl': ( '/fake_src/testing/buildbot/gn_isolate_map.pyl': (
"{'base_unittests': {" "{'base_unittests': {"
...@@ -523,55 +520,51 @@ class UnitTest(unittest.TestCase): ...@@ -523,55 +520,51 @@ class UnitTest(unittest.TestCase):
"base_unittests\n" "base_unittests\n"
), ),
} }
self.check(['run', '-c', 'gn_debug_goma', '//out/Default', self.check(['run', '-c', 'debug_goma', '//out/Default',
'base_unittests'], files=files, ret=0) 'base_unittests'], files=files, ret=0)
def test_gn_lookup(self): def test_run_swarmed(self):
self.check(['lookup', '-c', 'gn_debug_goma'], ret=0) files = {
'/fake_src/testing/buildbot/gn_isolate_map.pyl': (
"{'base_unittests': {"
" 'label': '//base:base_unittests',"
" 'type': 'raw',"
" 'args': [],"
"}}\n"
),
'/fake_src/out/Default/base_unittests.runtime_deps': (
"base_unittests\n"
),
}
def run_stub(cmd, **_kwargs):
if 'isolate.py' in cmd[1]:
return 0, 'fake_hash base_unittests', ''
else:
return 0, '', ''
def test_gn_lookup_goma_dir_expansion(self): mbw = self.fake_mbw(files=files)
self.check(['lookup', '-c', 'gn_rel_bot', '-g', '/foo'], ret=0, mbw.Run = run_stub
self.check(['run', '-s', '-c', 'debug_goma', '//out/Default',
'base_unittests'], mbw=mbw, ret=0)
self.check(['run', '-s', '-c', 'debug_goma', '-d', 'os', 'Win7',
'//out/Default', 'base_unittests'], mbw=mbw, ret=0)
""" # pylint: disable=pointless-string-statement
def test_lookup(self):
self.check(['lookup', '-c', 'debug_goma'], ret=0)
def test_lookup_goma_dir_expansion(self):
self.check(['lookup', '-c', 'rel_bot', '-g', '/foo'], ret=0,
out=('\n' out=('\n'
'Writing """\\\n' 'Writing """\\\n'
'enable_doom_melon = true\n'
'goma_dir = "/foo"\n' 'goma_dir = "/foo"\n'
'is_debug = false\n' 'is_debug = false\n'
'use_goma = true\n' 'use_goma = true\n'
'""" to _path_/args.gn.\n\n' '""" to _path_/args.gn.\n\n'
'/fake_src/buildtools/linux64/gn gen _path_\n')) '/fake_src/buildtools/linux64/gn gen _path_\n'))
def test_gyp_analyze(self):
mbw = self.check(['analyze', '-c', 'gyp_rel_bot', '//out/Release',
'/tmp/in.json', '/tmp/out.json'], ret=0)
self.assertIn('analyzer', mbw.calls[0])
def test_gyp_crosscompile(self):
mbw = self.fake_mbw()
self.check(['gen', '-c', 'gyp_crosscompile', '//out/Release'],
mbw=mbw, ret=0)
self.assertTrue(mbw.cross_compile)
def test_gyp_gen(self):
self.check(['gen', '-c', 'gyp_rel_bot', '-g', '/goma', '//out/Release'],
ret=0,
out=("GYP_DEFINES='goma=1 gomadir=/goma'\n"
"python build/gyp_chromium -G output_dir=out\n"))
mbw = self.fake_mbw(win32=True)
self.check(['gen', '-c', 'gyp_rel_bot', '-g', 'c:\\goma', '//out/Release'],
mbw=mbw, ret=0,
out=("set GYP_DEFINES=goma=1 gomadir='c:\\goma'\n"
"python build\\gyp_chromium -G output_dir=out\n"))
def test_gyp_gen_fails(self):
mbw = self.fake_mbw()
mbw.Call = lambda cmd, env=None, buffer_output=True: (1, '', '')
self.check(['gen', '-c', 'gyp_rel_bot', '//out/Release'], mbw=mbw, ret=1)
def test_gyp_lookup_goma_dir_expansion(self):
self.check(['lookup', '-c', 'gyp_rel_bot', '-g', '/foo'], ret=0,
out=("GYP_DEFINES='goma=1 gomadir=/foo'\n"
"python build/gyp_chromium -G output_dir=_path_\n"))
def test_help(self): def test_help(self):
orig_stdout = sys.stdout orig_stdout = sys.stdout
try: try:
...@@ -589,7 +582,7 @@ class UnitTest(unittest.TestCase): ...@@ -589,7 +582,7 @@ class UnitTest(unittest.TestCase):
self.assertIn('Must specify a build --phase', mbw.out) self.assertIn('Must specify a build --phase', mbw.out)
# Check that passing a --phase to a single-phase builder fails. # Check that passing a --phase to a single-phase builder fails.
mbw = self.check(['lookup', '-m', 'fake_master', '-b', 'fake_gn_builder', mbw = self.check(['lookup', '-m', 'fake_master', '-b', 'fake_builder',
'--phase', 'phase_1'], ret=1) '--phase', 'phase_1'], ret=1)
self.assertIn('Must not specify a build --phase', mbw.out) self.assertIn('Must not specify a build --phase', mbw.out)
...@@ -611,16 +604,6 @@ class UnitTest(unittest.TestCase): ...@@ -611,16 +604,6 @@ class UnitTest(unittest.TestCase):
mbw = self.fake_mbw() mbw = self.fake_mbw()
self.check(['validate'], mbw=mbw, ret=0) self.check(['validate'], mbw=mbw, ret=0)
def test_gyp_env_hacks(self):
mbw = self.fake_mbw()
mbw.files[mbw.default_config] = GYP_HACKS_CONFIG
self.check(['lookup', '-c', 'fake_config'], mbw=mbw,
ret=0,
out=("GYP_DEFINES='foo=bar baz=1'\n"
"GYP_LINK_CONCURRENCY=1\n"
"LLVM_FORCE_HEAD_REVISION=1\n"
"python build/gyp_chromium -G output_dir=_path_\n"))
def test_buildbucket(self): def test_buildbucket(self):
mbw = self.fake_mbw() mbw = self.fake_mbw()
mbw.files[mbw.default_config] = TRYSERVER_CONFIG mbw.files[mbw.default_config] = TRYSERVER_CONFIG
......