locs.py 14.3 KB
Newer Older
1 2
#!/usr/bin/env python

3 4 5 6 7 8 9 10
# Copyright 2018 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

""" locs.py - Count lines of code before and after preprocessor expansion
  Consult --help for more information.
"""

11 12 13
# for py2/py3 compatibility
from __future__ import print_function

14
import argparse
15
import json
16
import multiprocessing
17
import os
18
import re
19
import subprocess
20
import sys
21 22
import tempfile
import time
23
from collections import defaultdict
24 25
from concurrent.futures import ThreadPoolExecutor
from pathlib import Path
26

27 28
# for py2/py3 compatibility: Python 2 has no FileNotFoundError, so alias
# it to IOError there (referencing the name raises NameError on py2).
try:
  FileNotFoundError
except NameError:
  FileNotFoundError = IOError
32

33
# Command-line interface. ARGS (below) is the parsed options as a plain
# dict, read at module level by the rest of the script.
ARGPARSE = argparse.ArgumentParser(
    description=("A script that computes LoC for a build dir"),
    epilog="""Examples:
 Count with default settings for build in out/Default:
   locs.py --build-dir out/Default
 Count only a custom group of files settings for build in out/Default:
   tools/locs.py --build-dir out/Default
                 --group src-compiler '\.\./\.\./src/compiler'
                 --only src-compiler
 Report the 10 files with the worst expansion:
   tools/locs.py --build-dir out/Default --worst 10
 Report the 10 files with the worst expansion in src/compiler:
   tools/locs.py --build-dir out/Default --worst 10
                 --group src-compiler '\.\./\.\./src/compiler'
                 --only src-compiler
 Report the 10 largest files after preprocessing:
   tools/locs.py --build-dir out/Default --largest 10
 Report the 10 smallest input files:
   tools/locs.py --build-dir out/Default --smallest 10""",
    formatter_class=argparse.RawTextHelpFormatter
)

ARGPARSE.add_argument(
    '--json',
    action='store_true',
    default=False,
    help="output json instead of short summary")
ARGPARSE.add_argument(
    '--build-dir',
    type=str,
    help="Use specified build dir and generate necessary files",
    required=True)
ARGPARSE.add_argument(
    '--echocmd',
    action='store_true',
    default=False,
    help="output command used to compute LoC")
ARGPARSE.add_argument(
    '--only',
    action='append',
    default=[],
    help="Restrict counting to report group (can be passed multiple times)")
ARGPARSE.add_argument(
    '--not',
    action='append',
    default=[],
    help="Exclude specific group (can be passed multiple times)")
ARGPARSE.add_argument(
    '--list-groups',
    action='store_true',
    default=False,
    help="List groups and associated regular expressions")
ARGPARSE.add_argument(
    '--group',
    nargs=2,
    action='append',
    default=[],
    help="Add a report group (can be passed multiple times)")
# For --largest/--worst/--smallest/--files: default=0 means "off" when the
# flag is absent; const=3 is used when the flag is given with no number.
ARGPARSE.add_argument(
    '--largest',
    type=int,
    nargs='?',
    default=0,
    const=3,
    help="Output the n largest files after preprocessing")
ARGPARSE.add_argument(
    '--worst',
    type=int,
    nargs='?',
    default=0,
    const=3,
    help="Output the n files with worst expansion by preprocessing")
ARGPARSE.add_argument(
    '--smallest',
    type=int,
    nargs='?',
    default=0,
    const=3,
    help="Output the n smallest input files")
ARGPARSE.add_argument(
    '--files',
    type=int,
    nargs='?',
    default=0,
    const=3,
    help="Output results for each file separately")
ARGPARSE.add_argument(
    '--jobs',
    type=int,
    default=multiprocessing.cpu_count(),
    help="Process specified number of files concurrently")

# Parsed at import time; the script exits here on bad arguments.
ARGS = vars(ARGPARSE.parse_args())


def MaxWidth(strings):
  """Return the length of the longest string in strings (0 if empty)."""
  return max(map(len, strings), default=0)


135
def GenerateCompileCommandsAndBuild(build_dir, out):
  """Build build_dir and generate the files the counting step needs.

  Runs autoninja to build, then dumps the compilation database
  (compile_commands.json) and the ninja dependency listing into build_dir.

  Args:
    build_dir: path to an existing build directory.
    out: stream for progress messages (build output also goes there).

  Returns:
    (compile_commands_file, ninja_deps_file) paths inside build_dir.

  Exits the process with status 1 on any failure.
  """
  if not os.path.isdir(build_dir):
    print("Error: Specified build dir {} is not a directory.".format(
        build_dir), file=sys.stderr)
    exit(1)

  autoninja = "autoninja -C {}".format(build_dir)
  if subprocess.call(autoninja, shell=True, stdout=out) != 0:
    print("Error: Building {} failed.".format(build_dir), file=sys.stderr)
    exit(1)

  compile_commands_file = "{}/compile_commands.json".format(build_dir)
  print("Generating compile commands in {}.".format(
      compile_commands_file), file=out)
  ninja = "ninja -C {} -t compdb cxx cc > {}".format(
      build_dir, compile_commands_file)
  if subprocess.call(ninja, shell=True, stdout=out) != 0:
    # Typo fix: "Cound" -> "Could".
    print("Error: Could not generate {} for {}.".format(
        compile_commands_file, build_dir), file=sys.stderr)
    exit(1)

  ninja_deps_file = "{}/ninja-deps.txt".format(build_dir)
  print("Generating ninja dependencies in {}.".format(
      ninja_deps_file), file=out)
  ninja = "ninja -C {} -t deps > {}".format(
      build_dir, ninja_deps_file)
  if subprocess.call(ninja, shell=True, stdout=out) != 0:
    print("Error: Could not generate {} for {}.".format(
        ninja_deps_file, build_dir), file=sys.stderr)
    exit(1)

  return compile_commands_file, ninja_deps_file
167

168 169 170 171 172 173 174

def fmt_bytes(num_bytes):
  """Return (value, unit) with the unit one step below the size's range.

  Values above 1 GB are reported in MB, above 1 MB in kB, otherwise in
  bytes, so the numeric part always stays an integer.
  """
  KiB = 1024
  if num_bytes > KiB * KiB * KiB:
    return int(num_bytes / (KiB * KiB)), "MB"
  if num_bytes > KiB * KiB:
    return int(num_bytes / KiB), "kB"
  return int(num_bytes), " B"
175

176 177

class CompilationData:
  """Line and byte counts for one unit, before and after preprocessing."""

  def __init__(self, loc, in_bytes, expanded, expanded_bytes):
    self.loc = loc                        # lines of code before expansion
    self.in_bytes = in_bytes              # input size in bytes
    self.expanded = expanded              # lines after preprocessor expansion
    self.expanded_bytes = expanded_bytes  # expanded size in bytes

  def ratio(self):
    """Expansion factor; the +1 avoids division by zero for empty input."""
    return self.expanded / (self.loc + 1)

  def to_string(self):
    """Render counts as a fixed-width human-readable summary line."""
    expanded_size, expanded_unit = fmt_bytes(self.expanded_bytes)
    input_size, input_unit = fmt_bytes(self.in_bytes)
    return "{:>9,} LoC ({:>7,} {}) to {:>12,} LoC ({:>7,} {}) ({:>5.0f}x)".format(
        self.loc, input_size, input_unit, self.expanded, expanded_size,
        expanded_unit, self.ratio())
192

193

194
class File(CompilationData):
  """Compilation statistics for a single source file."""

  def __init__(self, file, target, loc, in_bytes, expanded, expanded_bytes):
    super().__init__(loc, in_bytes, expanded, expanded_bytes)
    self.file = file      # source file path
    self.target = target  # output object file

  def to_string(self):
    """Counts summary followed by the file and its target."""
    return " ".join((super().to_string(), self.file, self.target))
202 203 204 205


class Group(CompilationData):
  """Aggregated statistics for every file whose path matches a regexp."""

  def __init__(self, name, regexp_string):
    super().__init__(0, 0, 0, 0)
    self.name = name
    self.count = 0  # number of files accounted to this group
    self.regexp = re.compile(regexp_string)

  def account(self, unit):
    """Add unit's counts to this group if its file path matches."""
    if not self.regexp.match(unit.file):
      return
    self.loc += unit.loc
    self.in_bytes += unit.in_bytes
    self.expanded += unit.expanded
    self.expanded_bytes += unit.expanded_bytes
    self.count += 1

  def to_string(self, name_width):
    """Group name (padded to name_width), file count and counts summary."""
    return "{:<{}} ({:>5} files): {}".format(
        self.name, name_width, self.count, super().to_string())
222 223 224 225 226 227 228 229 230


def SetupReportGroups():
  """Build the {name: Group} mapping used to aggregate results.

  Starts from the default groups, merges in --group additions, then applies
  the --only and --not filters. With --list-groups the resulting groups and
  their regular expressions are also printed.

  Exits with status 1 if an --only argument names an unknown group.
  """
  default_report_groups = {"total": '.*',
                           "src": '\\.\\./\\.\\./src',
                           "test": '\\.\\./\\.\\./test',
                           "third_party": '\\.\\./\\.\\./third_party',
                           "gen": 'gen'}

  report_groups = default_report_groups.copy()
  report_groups.update(dict(ARGS['group']))

  if ARGS['only']:
    for only_arg in ARGS['only']:
      if only_arg not in report_groups:
        # Bug fix: report the offending name, not the whole --only list.
        print("Error: specified report group '{}' is not defined.".format(
            only_arg))
        exit(1)
    # Filter once, after validating every --only argument.
    report_groups = {
        k: v for (k, v) in report_groups.items() if k in ARGS['only']}

  if ARGS['not']:
    report_groups = {
        k: v for (k, v) in report_groups.items() if k not in ARGS['not']}

  if ARGS['list_groups']:
    print_cat_max_width = MaxWidth(list(report_groups.keys()) + ["Category"])
    print("  {:<{}}  {}".format("Category",
                                print_cat_max_width, "Regular expression"))
    for cat, regexp_string in report_groups.items():
      print("  {:<{}}: {}".format(
          cat, print_cat_max_width, regexp_string))

  return {k: Group(k, v) for (k, v) in report_groups.items()}


class Results:
  """Collects per-file counting results and aggregates them into groups."""

  def __init__(self):
    self.groups = SetupReportGroups()
    self.units = {}                # filename -> File
    self.source_dependencies = {}  # target -> number of dependencies
    self.header_dependents = {}    # header -> number of dependent targets

  def track(self, filename):
    """Return True if filename matches at least one report group."""
    return any(group.regexp.match(filename)
               for group in self.groups.values())

  def recordFile(self, filename, targetname, loc, in_bytes, expanded,
                 expanded_bytes):
    """Store one file's counts and account them to all matching groups."""
    unit = File(filename, targetname, loc, in_bytes, expanded, expanded_bytes)
    self.units[filename] = unit
    for group in self.groups.values():
      group.account(unit)

  def maxGroupWidth(self):
    """Widest group name, used to align the group report."""
    return MaxWidth([group.name for group in self.groups.values()])

  def printGroupResults(self, file):
    width = self.maxGroupWidth()
    for name in sorted(self.groups):
      print(self.groups[name].to_string(width), file=file)

  def printSorted(self, key, count, reverse, out):
    """Print the first `count` units ordered by `key`."""
    ranked = sorted(self.units.values(), key=key, reverse=reverse)
    for unit in ranked[:count]:
      print(unit.to_string(), file=out)

  def addHeaderDeps(self, source_dependencies, header_dependents):
    self.source_dependencies = source_dependencies
    self.header_dependents = header_dependents

296 297 298 299

class LocsEncoder(json.JSONEncoder):
  """JSON encoder that serializes File, Group and Results objects."""

  def default(self, o):
    if isinstance(o, File):
      keys = ("file", "target", "loc", "in_bytes", "expanded",
              "expanded_bytes")
      return {key: getattr(o, key) for key in keys}
    if isinstance(o, Group):
      keys = ("name", "loc", "in_bytes", "expanded", "expanded_bytes")
      return {key: getattr(o, key) for key in keys}
    if isinstance(o, Results):
      return {"groups": o.groups, "units": o.units,
              "source_dependencies": o.source_dependencies,
              "header_dependents": o.header_dependents}
    # Anything else is unsupported; let the base class raise TypeError.
    return super().default(o)
310 311 312 313 314 315


class StatusLine:
  """Prints an in-place, single-line status message on a terminal."""

  def __init__(self):
    # Widest line printed so far; shorter updates are padded with spaces
    # so they fully overwrite the previous status line.
    self.max_width = 0

  def print(self, statusline, end="\r", file=sys.stdout):
    if len(statusline) > self.max_width:
      self.max_width = len(statusline)
    padded = "{0:<{1}}".format(statusline, self.max_width)
    print(padded, end=end, file=file, flush=True)
320 321 322 323 324


class CommandSplitter:
  """Splits a compile-command entry into its interesting pieces."""

  def __init__(self):
    # Optional launcher prefix (e.g. a compiler wrapper), then the clang
    # invocation, the input after ' -c ' and the output after ' -o '.
    self.cmd_pattern = re.compile(
        "([^\\s]*\\s+)?(?P<clangcmd>[^\\s]*clang.*)"
        " -c (?P<infile>.*) -o (?P<outfile>.*)")

  def process(self, compilation_unit):
    """Return (clang command, input name, input path, output name)."""
    match = self.cmd_pattern.match(compilation_unit['command'])
    infilename = match.group('infile')
    # The input path is relative to the unit's working directory.
    infile = Path(compilation_unit['directory']) / infilename
    return (match.group('clangcmd'), infilename, infile,
            match.group('outfile'))
334

335

336 337 338 339 340 341 342 343 344 345 346 347 348 349 350 351 352 353 354 355 356 357 358 359 360 361 362 363 364 365 366 367 368 369
def parse_ninja_deps(ninja_deps):
  """Parse the output of `ninja -t deps`.

  Args:
    ninja_deps: iterable of lines, e.g. an open file.

  Returns:
    (source_dependencies, header_dependents): the first maps each target to
    its number of dependencies (from the "#deps N" annotation); the second
    counts, per .h/.hpp header, how many targets list it as a dependency.

  Exits the process on malformed input.
  """
  source_dependencies = {}
  header_dependents = defaultdict(int)
  current_target = None
  for line in ninja_deps:
    line = line.rstrip()
    # An empty line terminates the current target's dependency list.
    if not line:
      current_target = None
      continue
    if line[0] == ' ':
      # Dependency lines are indented by exactly four spaces; line[4] is
      # the first character of the path. (Bug fix: the original checked
      # line[5], which crashed on one-character deps and accepted
      # five-space indentation.)
      if len(line) < 5 or line[0:4] != '    ' or line[4] == ' ':
        sys.exit('Lines must have no indentation or exactly four ' +
                 'spaces.')
      dep = line[4:]
      # Only headers are counted.
      if not re.search(r"\.(h|hpp)$", dep):
        continue
      header_dependents[dep] += 1
      continue
    # Unindented lines start a new target: "<target>: #deps <n> ...".
    colon_pos = line.find(':')
    if colon_pos < 0:
      sys.exit('Unindented line must have a colon')
    if current_target is not None:
      sys.exit('Missing empty line before new target')
    current_target = line[0:colon_pos]
    match = re.search(r"#deps (\d+)", line)
    deps_number = match.group(1)
    source_dependencies[current_target] = int(deps_number)

  return (source_dependencies, header_dependents)


370
def Main():
  """Count LoC before/after preprocessing for every unit in --build-dir."""
  # With --json, stdout carries only the machine-readable result, so all
  # status output is redirected to stderr.
  out = sys.stdout
  if ARGS['json']:
    out = sys.stderr

  compile_commands_file, ninja_deps_file = GenerateCompileCommandsAndBuild(
      ARGS['build_dir'], out)

  result = Results()
  status = StatusLine()

  try:
    with open(compile_commands_file) as file:
      compile_commands = json.load(file)
    with open(ninja_deps_file) as file:
      source_dependencies, header_dependents = parse_ninja_deps(file)
      result.addHeaderDeps(source_dependencies, header_dependents)
  except FileNotFoundError as e:
    # Bug fix: report the file that actually failed to open; the original
    # always blamed ninja_deps_file even when compile_commands_file was
    # the missing one.
    failed = getattr(e, 'filename', None) or ninja_deps_file
    print("Error: Cannot read '{}'. Consult --help to get started.".format(
        failed))
    exit(1)

  cmd_splitter = CommandSplitter()

  def count_lines_of_unit(ikey):
    """Preprocess one compilation unit and record its LoC counts."""
    i, key = ikey
    if not result.track(key['file']):
      return
    message = "[{}/{}] Counting LoCs of {}".format(
        i, len(compile_commands), key['file'])
    status.print(message, file=out)
    clangcmd, infilename, infile, outfilename = cmd_splitter.process(key)
    if not infile.is_file():
      return

    # Preprocess (-E -P), drop blank lines, count lines and bytes (wc -lc).
    clangcmd = clangcmd + " -E -P " + \
        str(infile) + " -o /dev/stdout | sed '/^\\s*$/d' | wc -lc"
    # Count raw input, ignoring comment-only and blank lines.
    loccmd = ("cat {}  | sed '\\;^\\s*//;d' | sed '\\;^/\\*;d'"
              " | sed '/^\\*/d' | sed '/^\\s*$/d' | wc -lc")
    loccmd = loccmd.format(infile)
    runcmd = " {} ; {}".format(clangcmd, loccmd)
    if ARGS['echocmd']:
      print(runcmd)
    process = subprocess.Popen(
        runcmd, shell=True, cwd=key['directory'], stdout=subprocess.PIPE)
    p = {'process': process, 'infile': infilename, 'outfile': outfilename}
    output, _ = p['process'].communicate()
    # Both wc -lc invocations print "<lines> <bytes>", in pipeline order.
    expanded, expanded_bytes, loc, in_bytes = list(map(int, output.split()))
    result.recordFile(p['infile'], p['outfile'], loc,
                      in_bytes, expanded, expanded_bytes)

  # NOTE(review): `temp` itself is unused; the context manager only scopes
  # a scratch directory's lifetime — confirm whether it can be dropped.
  with tempfile.TemporaryDirectory(dir='/tmp/', prefix="locs.") as temp:
    start = time.time()

    with ThreadPoolExecutor(max_workers=ARGS['jobs']) as executor:
      # Force the lazy map so all units are processed before proceeding.
      list(executor.map(count_lines_of_unit, enumerate(compile_commands)))

    end = time.time()
    if ARGS['json']:
      print(json.dumps(result, ensure_ascii=False, cls=LocsEncoder))
    status.print("Processed {:,} files in {:,.2f} sec.".format(
        len(compile_commands), end-start), end="\n", file=out)
    result.printGroupResults(file=out)

    if ARGS['largest']:
      print("Largest {} files after expansion:".format(ARGS['largest']))
      result.printSorted(
          lambda v: v.expanded, ARGS['largest'], reverse=True, out=out)

    if ARGS['worst']:
      print("Worst expansion ({} files):".format(ARGS['worst']))
      result.printSorted(
          lambda v: v.ratio(), ARGS['worst'], reverse=True, out=out)

    if ARGS['smallest']:
      print("Smallest {} input files:".format(ARGS['smallest']))
      result.printSorted(
          lambda v: v.loc, ARGS['smallest'], reverse=False, out=out)

    if ARGS['files']:
      print("List of input files:")
      result.printSorted(
          lambda v: v.file, ARGS['files'], reverse=False, out=out)

  return 0

456

457 458
# Script entry point: exit with Main()'s return code.
if __name__ == '__main__':
  sys.exit(Main())