// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
//       notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
//       copyright notice, this list of conditions and the following
//       disclaimer in the documentation and/or other materials provided
//       with the distribution.
//     * Neither the name of Google Inc. nor the names of its
//       contributors may be used to endorse or promote products derived
//       from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "v8.h"

#include "compiler.h"

#include "bootstrapper.h"
#include "codegen.h"
#include "compilation-cache.h"
#include "cpu-profiler.h"
#include "debug.h"
#include "deoptimizer.h"
#include "full-codegen.h"
#include "gdb-jit.h"
#include "typing.h"
#include "hydrogen.h"
#include "isolate-inl.h"
#include "lithium.h"
#include "liveedit.h"
#include "parser.h"
#include "rewriter.h"
#include "runtime-profiler.h"
#include "scanner-character-streams.h"
#include "scopeinfo.h"
#include "scopes.h"
#include "vm-state-inl.h"

namespace v8 {
namespace internal {


CompilationInfo::CompilationInfo(Handle<Script> script,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE)),
      script_(script),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
  Initialize(script->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(Handle<SharedFunctionInfo> shared_info,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE) | IsLazy::encode(true)),
      shared_info_(shared_info),
      script_(Handle<Script>(Script::cast(shared_info->script()))),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
  Initialize(script_->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(Handle<JSFunction> closure,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE) | IsLazy::encode(true)),
      closure_(closure),
      shared_info_(Handle<SharedFunctionInfo>(closure->shared())),
      script_(Handle<Script>(Script::cast(shared_info_->script()))),
      context_(closure->context()),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
  Initialize(script_->GetIsolate(), BASE, zone);
}


CompilationInfo::CompilationInfo(HydrogenCodeStub* stub,
                                 Isolate* isolate,
                                 Zone* zone)
    : flags_(LanguageModeField::encode(CLASSIC_MODE) |
             IsLazy::encode(true)),
      osr_ast_id_(BailoutId::None()),
      osr_pc_offset_(0) {
  Initialize(isolate, STUB, zone);
  code_stub_ = stub;
}


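// Common initialization shared by the constructors above. Code stubs are
// always compiled in STUB mode; for everything else the requested mode is
// used, downgraded to NONOPT when Crankshaft is unavailable.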
void CompilationInfo::Initialize(Isolate* isolate,
                                 Mode mode,
                                 Zone* zone) {
  isolate_ = isolate;
  function_ = NULL;
  scope_ = NULL;
  global_scope_ = NULL;
  extension_ = NULL;
  pre_parse_data_ = NULL;
  zone_ = zone;
  deferred_handles_ = NULL;
  code_stub_ = NULL;
  prologue_offset_ = kPrologueOffsetNotSet;
  opt_count_ = shared_info().is_null() ? 0 : shared_info()->opt_count();
  no_frame_ranges_ = isolate->cpu_profiler()->is_profiling()
                   ? new List<OffsetRange>(2) : NULL;
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    dependencies_[i] = NULL;
  }
  if (mode == STUB) {
    mode_ = STUB;
    return;
  }
  mode_ = isolate->use_crankshaft() ? mode : NONOPT;
  abort_due_to_dependency_ = false;
  if (script_->type()->value() == Script::TYPE_NATIVE) {
    MarkAsNative();
  }
  if (!shared_info_.is_null()) {
    ASSERT(language_mode() == CLASSIC_MODE);
    SetLanguageMode(shared_info_->language_mode());
  }
  set_bailout_reason(kUnknown);
}


CompilationInfo::~CompilationInfo() {
  delete deferred_handles_;
  delete no_frame_ranges_;
#ifdef DEBUG
  // Check that no dependent maps have been added or added dependent maps have
  // been rolled back or committed.
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ASSERT_EQ(NULL, dependencies_[i]);
  }
#endif  // DEBUG
}


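// Registers the finished code object with every dependency group this
// compilation was added to during graph building, then drops the
// zone-allocated dependency lists.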
void CompilationInfo::CommitDependencies(Handle<Code> code) {
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
    if (group_objects == NULL) continue;
    ASSERT(!object_wrapper_.is_null());
    for (int j = 0; j < group_objects->length(); j++) {
      DependentCode::DependencyGroup group =
          static_cast<DependentCode::DependencyGroup>(i);
      DependentCode* dependent_code =
          DependentCode::ForObject(group_objects->at(j), group);
      dependent_code->UpdateToFinishedCode(group, this, *code);
    }
    dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
  }
}


void CompilationInfo::RollbackDependencies() {
  // Unregister from all dependent maps if not yet committed.
  for (int i = 0; i < DependentCode::kGroupCount; i++) {
    ZoneList<Handle<HeapObject> >* group_objects = dependencies_[i];
    if (group_objects == NULL) continue;
    for (int j = 0; j < group_objects->length(); j++) {
      DependentCode::DependencyGroup group =
          static_cast<DependentCode::DependencyGroup>(i);
      DependentCode* dependent_code =
          DependentCode::ForObject(group_objects->at(j), group);
      dependent_code->RemoveCompilationInfo(group, this);
    }
    dependencies_[i] = NULL;  // Zone-allocated, no need to delete.
  }
}


int CompilationInfo::num_parameters() const {
  ASSERT(!IsStub());
  return scope()->num_parameters();
}


int CompilationInfo::num_heap_slots() const {
  if (IsStub()) {
    return 0;
  } else {
    return scope()->num_heap_slots();
  }
}


Code::Flags CompilationInfo::flags() const {
  if (IsStub()) {
    return Code::ComputeFlags(code_stub()->GetCodeKind(),
                              code_stub()->GetICState(),
                              code_stub()->GetExtraICState(),
                              code_stub()->GetStubType(),
                              code_stub()->GetStubFlags());
  } else {
    return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
  }
}


// Disable optimization for the rest of the compilation pipeline.
void CompilationInfo::DisableOptimization() {
  bool is_optimizable_closure =
    FLAG_optimize_closures &&
    closure_.is_null() &&
    !scope_->HasTrivialOuterContext() &&
    !scope_->outer_scope_calls_non_strict_eval() &&
    !scope_->inside_with();
  SetMode(is_optimizable_closure ? BASE : NONOPT);
}


// Primitive functions are unlikely to be picked up by the stack-walking
// profiler, so they trigger their own optimization when they're called
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
bool CompilationInfo::ShouldSelfOptimize() {
  return FLAG_self_optimization &&
      FLAG_crankshaft &&
      !function()->flags()->Contains(kDontSelfOptimize) &&
      !function()->dont_optimize() &&
      function()->scope()->AllowsLazyCompilation() &&
      (shared_info().is_null() || !shared_info()->optimization_disabled());
}


// Determine whether to use the full compiler for all code. If the flag
// --always-full-compiler is specified this is the case. For the virtual frame
// based compiler the full compiler is also used if a debugger is connected, as
// the code from the full compiler supports more precise break points. For the
// crankshaft adaptive compiler debugging the optimized code is not possible at
// all. However, crankshaft supports recompilation of functions, so in this
// case the full compiler need not be used if a debugger is attached, but only
// if break points have actually been set.
static bool IsDebuggerActive(Isolate* isolate) {
#ifdef ENABLE_DEBUGGER_SUPPORT
  return isolate->use_crankshaft() ?
    isolate->debug()->has_break_points() :
    isolate->debugger()->IsDebuggerActive();
#else
  return false;
#endif
}


static bool AlwaysFullCompiler(Isolate* isolate) {
  return FLAG_always_full_compiler || IsDebuggerActive(isolate);
}


void OptimizingCompiler::RecordOptimizationStats() {
  Handle<JSFunction> function = info()->closure();
  int opt_count = function->shared()->opt_count();
  function->shared()->set_opt_count(opt_count + 1);
  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions,
           code_size,
           compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
                                                    time_taken_to_optimize_,
                                                    time_taken_to_codegen_);
  }
}


// A return value of true indicates the compilation pipeline is still
// going, not necessarily that we optimized the code.
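// The three phases (CreateGraph, OptimizeGraph, GenerateAndInstallCode) run
// back-to-back here; the concurrent recompilation path instead runs only
// CreateGraph synchronously and finishes the remaining phases later.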
static bool MakeCrankshaftCode(CompilationInfo* info) {
  OptimizingCompiler compiler(info);
  OptimizingCompiler::Status status = compiler.CreateGraph();

  if (status != OptimizingCompiler::SUCCEEDED) {
    return status != OptimizingCompiler::FAILED;
  }
  status = compiler.OptimizeGraph();
  if (status != OptimizingCompiler::SUCCEEDED) {
    status = compiler.AbortOptimization();
    return status != OptimizingCompiler::FAILED;
  }
  status = compiler.GenerateAndInstallCode();
  return status != OptimizingCompiler::FAILED;
}


OptimizingCompiler::Status OptimizingCompiler::CreateGraph() {
  ASSERT(isolate()->use_crankshaft());
  ASSERT(info()->IsOptimizing());
  ASSERT(!info()->IsCompilingForDebugging());

  // We should never arrive here if there is no code object on the
  // shared function object.
  ASSERT(info()->shared_info()->code()->kind() == Code::FUNCTION);

  // We should never arrive here if optimization has been disabled on the
  // shared function info.
  ASSERT(!info()->shared_info()->optimization_disabled());

  // Fall back to using the full code generator if it's not possible
  // to use the Hydrogen-based optimizing compiler. We already have
  // generated code for this from the shared function object.
  if (AlwaysFullCompiler(isolate())) {
    info()->AbortOptimization();
    return SetLastStatus(BAILED_OUT);
  }

  // Limit the number of times we re-compile a function with
  // the optimizing compiler.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info()->opt_count() > kMaxOptCount) {
    info()->set_bailout_reason(kOptimizedTooManyTimes);
    return AbortOptimization();
  }

  // Due to an encoding limit on LUnallocated operands in the Lithium
  // language, we cannot optimize functions with too many formal parameters
  // or perform on-stack replacement for functions with too many
  // stack-allocated local variables.
  //
  // The encoding is as a signed value, with parameters and receiver using
  // the negative indices and locals the non-negative ones.
  const int parameter_limit = -LUnallocated::kMinFixedSlotIndex;
  Scope* scope = info()->scope();
  if ((scope->num_parameters() + 1) > parameter_limit) {
    info()->set_bailout_reason(kTooManyParameters);
    return AbortOptimization();
  }

  const int locals_limit = LUnallocated::kMaxFixedSlotIndex;
  if (info()->is_osr() &&
      scope->num_parameters() + 1 + scope->num_stack_slots() > locals_limit) {
    info()->set_bailout_reason(kTooManyParametersLocals);
    return AbortOptimization();
  }

  // Take --hydrogen-filter into account.
  if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
    info()->AbortOptimization();
    return SetLastStatus(BAILED_OUT);
  }

  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support. Alternatively, we may decide to
  // run the full code generator to get a baseline for the compile-time
  // performance of the hydrogen-based compiler.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    CompilationInfoWithZone unoptimized(info()->shared_info());
    // Note that we use the same AST that we will use for generating the
    // optimized code.
    unoptimized.SetFunction(info()->function());
    unoptimized.SetScope(info()->scope());
    unoptimized.SetContext(info()->context());
    if (should_recompile) unoptimized.EnableDeoptimizationSupport();
    bool succeeded = FullCodeGenerator::MakeCode(&unoptimized);
    if (should_recompile) {
      if (!succeeded) return SetLastStatus(FAILED);
      Handle<SharedFunctionInfo> shared = info()->shared_info();
      shared->EnableDeoptimizationSupport(*unoptimized.code());
      // The existing unoptimized code was replaced with the new one.
      Compiler::RecordFunctionCompilation(
          Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
    }
    if (FLAG_hydrogen_stats) {
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }

  // Check that the unoptimized, shared code is ready for
  // optimizations.  When using the always_opt flag we disregard the
  // optimizable marker in the code object and optimize anyway. This
  // is safe as long as the unoptimized code has deoptimization
  // support.
  ASSERT(FLAG_always_opt || info()->shared_info()->code()->optimizable());
  ASSERT(info()->shared_info()->has_deoptimization_support());

  if (FLAG_trace_hydrogen) {
    Handle<String> name = info()->function()->debug_name();
    PrintF("-----------------------------------------------------------\n");
    PrintF("Compiling method %s using hydrogen\n", *name->ToCString());
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Type-check the function.
  AstTyper::Run(info());

  graph_builder_ = new(info()->zone()) HOptimizedGraphBuilder(info());

  Timer t(this, &time_taken_to_create_graph_);
  graph_ = graph_builder_->CreateGraph();

  if (isolate()->has_pending_exception()) {
    info()->SetCode(Handle<Code>::null());
    return SetLastStatus(FAILED);
  }

  // The function being compiled may have bailed out due to an inline
  // candidate bailing out.  In such a case, we don't disable
  // optimization on the shared_info.
  ASSERT(!graph_builder_->inline_bailout() || graph_ == NULL);
  if (graph_ == NULL) {
    if (graph_builder_->inline_bailout()) {
      info_->AbortOptimization();
      return SetLastStatus(BAILED_OUT);
    } else {
      return AbortOptimization();
    }
  }

  if (info()->HasAbortedDueToDependencyChange()) {
    info_->set_bailout_reason(kBailedOutDueToDependencyChange);
    info_->AbortOptimization();
    return SetLastStatus(BAILED_OUT);
  }

  return SetLastStatus(SUCCEEDED);
}


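// Phase two: run the Hydrogen optimization passes and build the Lithium
// chunk. No heap allocation or handle dereferencing is allowed here, which
// is what makes this phase safe to run on the concurrent compiler thread.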
OptimizingCompiler::Status OptimizingCompiler::OptimizeGraph() {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
  DisallowCodeDependencyChange no_dependency_change;

  ASSERT(last_status() == SUCCEEDED);
  Timer t(this, &time_taken_to_optimize_);
  ASSERT(graph_ != NULL);
  BailoutReason bailout_reason = kNoReason;
  if (!graph_->Optimize(&bailout_reason)) {
    if (bailout_reason != kNoReason) graph_builder_->Bailout(bailout_reason);
    return SetLastStatus(BAILED_OUT);
  } else {
    chunk_ = LChunk::NewChunk(graph_);
    if (chunk_ == NULL) {
      return SetLastStatus(BAILED_OUT);
    }
  }
  return SetLastStatus(SUCCEEDED);
}


OptimizingCompiler::Status OptimizingCompiler::GenerateAndInstallCode() {
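  // Phase three: generate machine code from the Lithium chunk, attach it to
  // the CompilationInfo and record optimization statistics.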
  ASSERT(last_status() == SUCCEEDED);
  ASSERT(!info()->HasAbortedDueToDependencyChange());
  DisallowCodeDependencyChange no_dependency_change;
  {  // Scope for timer.
    Timer timer(this, &time_taken_to_codegen_);
    ASSERT(chunk_ != NULL);
    ASSERT(graph_ != NULL);
    // Deferred handles reference objects that were accessible during
    // graph creation.  To make sure that we don't encounter inconsistencies
    // between graph creation and code generation, we disallow accessing
    // objects through deferred handles during the latter, with exceptions.
    DisallowDeferredHandleDereference no_deferred_handle_deref;
    Handle<Code> optimized_code = chunk_->Codegen();
    if (optimized_code.is_null()) {
      if (info()->bailout_reason() == kNoReason) {
        info()->set_bailout_reason(kCodeGenerationFailed);
      }
      return AbortOptimization();
    }
    info()->SetCode(optimized_code);
  }
  RecordOptimizationStats();
  // Add to the weak list of optimized code objects.
  info()->context()->native_context()->AddOptimizedCode(*info()->code());
  return SetLastStatus(SUCCEEDED);
}


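// Dispatches between the Crankshaft pipeline and the full code generator,
// depending on whether this CompilationInfo was set up for optimization and
// whether optimized compilation is currently allowed.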
static bool GenerateCode(CompilationInfo* info) {
  bool is_optimizing = info->isolate()->use_crankshaft() &&
                       !info->IsCompilingForDebugging() &&
                       info->IsOptimizing();
  if (is_optimizing) {
    Logger::TimerEventScope timer(
        info->isolate(), Logger::TimerEventScope::v8_recompile_synchronous);
    return MakeCrankshaftCode(info);
  } else {
    if (info->IsOptimizing()) {
      // Have the CompilationInfo decide if the compilation should be
      // BASE or NONOPT.
      info->DisableOptimization();
    }
    Logger::TimerEventScope timer(
        info->isolate(), Logger::TimerEventScope::v8_compile_full_code);
    return FullCodeGenerator::MakeCode(info);
  }
}


static bool MakeCode(CompilationInfo* info) {
  // Precondition: code has been parsed.  Postcondition: the code field in
  // the compilation info is set if compilation succeeded.
  ASSERT(info->function() != NULL);
  return Rewriter::Rewrite(info) && Scope::Analyze(info) && GenerateCode(info);
}


#ifdef ENABLE_DEBUGGER_SUPPORT
bool Compiler::MakeCodeForLiveEdit(CompilationInfo* info) {
  // Precondition: code has been parsed.  Postcondition: the code field in
  // the compilation info is set if compilation succeeded.
  bool succeeded = MakeCode(info);
  if (!info->shared_info().is_null()) {
    Handle<ScopeInfo> scope_info = ScopeInfo::Create(info->scope(),
                                                     info->zone());
    info->shared_info()->set_scope_info(*scope_info);
  }
  return succeeded;
}
#endif


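// Eager compilation is forced while LiveEdit is active, or while the
// debugger has break points set and lazy compilation without a context has
// not been explicitly allowed by the caller.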
static bool DebuggerWantsEagerCompilation(CompilationInfo* info,
                                          bool allow_lazy_without_ctx = false) {
  return LiveEditFunctionTracker::IsActive(info->isolate()) ||
         (info->isolate()->DebuggerHasBreakPoints() && !allow_lazy_without_ctx);
}


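// Compiles a top-level script or eval and returns its SharedFunctionInfo.
// Returns an empty handle if parsing or code generation fails; in that case
// a pending exception (e.g. a stack overflow) is left on the isolate.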
static Handle<SharedFunctionInfo> MakeFunctionInfo(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  PostponeInterruptsScope postpone(isolate);

  ASSERT(!isolate->native_context().is_null());
  Handle<Script> script = info->script();
  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(0));

#ifdef ENABLE_DEBUGGER_SUPPORT
  if (info->is_eval()) {
    script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
    // For eval scripts add information on the function from which eval was
    // called.
    if (info->is_eval()) {
      StackTraceFrameIterator it(isolate);
      if (!it.done()) {
        script->set_eval_from_shared(it.frame()->function()->shared());
        Code* code = it.frame()->LookupCode();
        int offset = static_cast<int>(
            it.frame()->pc() - code->instruction_start());
        script->set_eval_from_instructions_offset(Smi::FromInt(offset));
      }
    }
  }

  // Notify debugger
  isolate->debugger()->OnBeforeCompile(script);
#endif

  // Only allow non-global compiles for eval.
  ASSERT(info->is_eval() || info->is_global());
  {
    Parser parser(info);
    if ((info->pre_parse_data() != NULL ||
         String::cast(script->source())->length() > FLAG_min_preparse_length) &&
        !DebuggerWantsEagerCompilation(info))
      parser.set_allow_lazy(true);
    if (!parser.Parse()) {
      return Handle<SharedFunctionInfo>::null();
    }
  }

  // Measure how long it takes to do the compilation; only take the
  // rest of the function into account to avoid overlap with the
  // parsing statistics.
  HistogramTimer* rate = info->is_eval()
      ? info->isolate()->counters()->compile_eval()
      : info->isolate()->counters()->compile();
  HistogramTimerScope timer(rate);

  // Compile the code.
  FunctionLiteral* lit = info->function();
  LiveEditFunctionTracker live_edit_tracker(isolate, lit);
  if (!MakeCode(info)) {
    if (!isolate->has_pending_exception()) isolate->StackOverflow();
    return Handle<SharedFunctionInfo>::null();
  }

  // Allocate function.
  ASSERT(!info->code().is_null());
  Handle<SharedFunctionInfo> result =
      isolate->factory()->NewSharedFunctionInfo(
          lit->name(),
          lit->materialized_literal_count(),
          lit->is_generator(),
          info->code(),
          ScopeInfo::Create(info->scope(), info->zone()));

  ASSERT_EQ(RelocInfo::kNoPosition, lit->function_token_position());
  Compiler::SetFunctionInfo(result, lit, true, script);

  if (script->name()->IsString()) {
    PROFILE(isolate, CodeCreateEvent(
        info->is_eval()
            ? Logger::EVAL_TAG
            : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
        *info->code(),
        *result,
        info,
        String::cast(script->name())));
    GDBJIT(AddCode(Handle<String>(String::cast(script->name())),
                   script,
                   info->code(),
                   info));
  } else {
    PROFILE(isolate, CodeCreateEvent(
        info->is_eval()
            ? Logger::EVAL_TAG
            : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script),
        *info->code(),
        *result,
        info,
        isolate->heap()->empty_string()));
    GDBJIT(AddCode(Handle<String>(), script, info->code(), info));
  }

  // Hint to the runtime system used when allocating space for initial
  // properties by setting the expected number of properties for
  // the instances of the function.
  SetExpectedNofPropertiesFromEstimate(result, lit->expected_property_count());

  script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);

#ifdef ENABLE_DEBUGGER_SUPPORT
  // Notify debugger
  isolate->debugger()->OnAfterCompile(
      script, Debugger::NO_AFTER_COMPILE_FLAGS);
#endif

  live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());

  return result;
}


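// Entry point for compiling a script. The compilation cache is consulted
// first (except when compiling with an extension); on a miss the source is
// parsed and compiled, and the result is cached when caching is allowed.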
Handle<SharedFunctionInfo> Compiler::Compile(Handle<String> source,
                                             Handle<Object> script_name,
                                             int line_offset,
                                             int column_offset,
                                             bool is_shared_cross_origin,
                                             Handle<Context> context,
                                             v8::Extension* extension,
                                             ScriptDataImpl* pre_data,
                                             Handle<Object> script_data,
                                             NativesFlag natives) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  // The VM is in the COMPILER state until exiting this function.
  VMState<COMPILER> state(isolate);

  CompilationCache* compilation_cache = isolate->compilation_cache();

  // Do a lookup in the compilation cache but not for extensions.
  Handle<SharedFunctionInfo> result;
  if (extension == NULL) {
    result = compilation_cache->LookupScript(source,
                                             script_name,
                                             line_offset,
                                             column_offset,
                                             is_shared_cross_origin,
                                             context);
  }

  if (result.is_null()) {
    // No cache entry found. Do pre-parsing, if it makes sense, and compile
    // the script.
    // Building preparse data that is only used immediately after is only a
    // saving if we might skip building the AST for lazily compiled functions.
    // I.e., preparse data isn't relevant when the lazy flag is off, and
    // for small sources, odds are that there aren't many functions
    // that would be compiled lazily anyway, so we skip the preparse step
    // in that case too.

    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    if (natives == NATIVES_CODE) {
      script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
    }
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(Smi::FromInt(line_offset));
      script->set_column_offset(Smi::FromInt(column_offset));
    }
    script->set_is_shared_cross_origin(is_shared_cross_origin);

    script->set_data(script_data.is_null() ? isolate->heap()->undefined_value()
                                           : *script_data);

    // Compile the function and add it to the cache.
    CompilationInfoWithZone info(script);
    info.MarkAsGlobal();
    info.SetExtension(extension);
    info.SetPreParseData(pre_data);
    info.SetContext(context);
    if (FLAG_use_strict) {
      info.SetLanguageMode(FLAG_harmony_scoping ? EXTENDED_MODE : STRICT_MODE);
    }
    result = MakeFunctionInfo(&info);
    if (extension == NULL && !result.is_null() && !result->dont_cache()) {
      compilation_cache->PutScript(source, context, result);
    }
  } else {
    if (result->ic_age() != isolate->heap()->global_ic_age()) {
      result->ResetForNewContext(isolate->heap()->global_ic_age());
    }
  }

  if (result.is_null()) isolate->ReportPendingMessages();
  return result;
}


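// Entry point for compiling eval code. Like Compiler::Compile, but the cache
// is keyed on the calling context, language mode and scope position, and the
// resulting code is never optimized.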
Handle<SharedFunctionInfo> Compiler::CompileEval(Handle<String> source,
                                                 Handle<Context> context,
                                                 bool is_global,
                                                 LanguageMode language_mode,
                                                 ParseRestriction restriction,
                                                 int scope_position) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_eval_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  // The VM is in the COMPILER state until exiting this function.
  VMState<COMPILER> state(isolate);

  // Do a lookup in the compilation cache; if the entry is not there, invoke
  // the compiler and add the result to the cache.
  Handle<SharedFunctionInfo> result;
  CompilationCache* compilation_cache = isolate->compilation_cache();
  result = compilation_cache->LookupEval(source,
                                         context,
                                         is_global,
                                         language_mode,
                                         scope_position);

  if (result.is_null()) {
    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    CompilationInfoWithZone info(script);
    info.MarkAsEval();
    if (is_global) info.MarkAsGlobal();
    info.SetLanguageMode(language_mode);
    info.SetParseRestriction(restriction);
    info.SetContext(context);
    result = MakeFunctionInfo(&info);
    if (!result.is_null()) {
      // Explicitly disable optimization for eval code. We're not yet prepared
      // to handle eval-code in the optimizing compiler.
      result->DisableOptimization(kEval);

      // If caller is strict mode, the result must be in strict mode or
      // extended mode as well, but not the other way around. Consider:
      // eval("'use strict'; ...");
      ASSERT(language_mode != STRICT_MODE || !result->is_classic_mode());
      // If caller is in extended mode, the result must also be in
      // extended mode.
      ASSERT(language_mode != EXTENDED_MODE ||
             result->is_extended_mode());
      if (!result->dont_cache()) {
        compilation_cache->PutEval(
            source, context, is_global, result, scope_position);
      }
    }
  } else {
    if (result->ic_age() != isolate->heap()->global_ic_age()) {
      result->ResetForNewContext(isolate->heap()->global_ic_age());
    }
  }

  return result;
}


static bool InstallFullCode(CompilationInfo* info) {
  // Update the shared function info with the compiled code and the
  // scope info.  Please note, that the order of the shared function
  // info initialization is important since set_scope_info might
  // trigger a GC, causing the ASSERT below to be invalid if the code
  // was flushed. By setting the code object last we avoid this.
  Handle<SharedFunctionInfo> shared = info->shared_info();
  Handle<Code> code = info->code();
  CHECK(code->kind() == Code::FUNCTION);
  Handle<JSFunction> function = info->closure();
  Handle<ScopeInfo> scope_info =
      ScopeInfo::Create(info->scope(), info->zone());
  shared->set_scope_info(*scope_info);
  shared->ReplaceCode(*code);
  if (!function.is_null()) {
    function->ReplaceCode(*code);
    ASSERT(!function->IsOptimized());
  }

  // Set the expected number of properties for instances.
  FunctionLiteral* lit = info->function();
  int expected = lit->expected_property_count();
  SetExpectedNofPropertiesFromEstimate(shared, expected);

  // Check the function has compiled code.
  ASSERT(shared->is_compiled());
  shared->set_dont_optimize_reason(lit->dont_optimize_reason());
  shared->set_dont_inline(lit->flags()->Contains(kDontInline));
  shared->set_ast_node_count(lit->ast_node_count());

  if (info->isolate()->use_crankshaft() &&
      !function.is_null() &&
      !shared->optimization_disabled()) {
    // If we're asked to always optimize, we compile the optimized
    // version of the function right away - unless the debugger is
    // active as it makes no sense to compile optimized code then.
    if (FLAG_always_opt &&
        !info->isolate()->DebuggerHasBreakPoints()) {
      CompilationInfoWithZone optimized(function);
      optimized.SetOptimizing(BailoutId::None());
      return Compiler::CompileLazy(&optimized);
    }
  }
  return true;
}


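// Clears the optimizable bit on the new code object if optimization has been
// disabled on the shared function info, and logs the compilation unless the
// very same code object is already installed.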
static void InstallCodeCommon(CompilationInfo* info) {
  Handle<SharedFunctionInfo> shared = info->shared_info();
  Handle<Code> code = info->code();
  ASSERT(!code.is_null());

  // Set optimizable to false if this is disallowed by the shared
  // function info, e.g., we might have flushed the code and must
  // reset this bit when lazy compiling the code again.
  if (shared->optimization_disabled()) code->set_optimizable(false);

  if (shared->code() == *code) {
    // Do not send compilation event for the same code twice.
    return;
  }
  Compiler::RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);
}


static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
  Handle<Code> code = info->code();
  if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.

  // Cache non-OSR optimized code.
  if (FLAG_cache_optimized_code && !info->is_osr()) {
    Handle<JSFunction> function = info->closure();
    Handle<SharedFunctionInfo> shared(function->shared());
    Handle<FixedArray> literals(function->literals());
    Handle<Context> native_context(function->context()->native_context());
    SharedFunctionInfo::AddToOptimizedCodeMap(
        shared, native_context, code, literals);
  }
}


static bool InstallCodeFromOptimizedCodeMap(CompilationInfo* info) {
  if (!info->IsOptimizing()) return false;  // Nothing to look up.

  // Lookup non-OSR optimized code.
  if (FLAG_cache_optimized_code && !info->is_osr()) {
    Handle<SharedFunctionInfo> shared = info->shared_info();
    Handle<JSFunction> function = info->closure();
    ASSERT(!function.is_null());
    Handle<Context> native_context(function->context()->native_context());
    int index = shared->SearchOptimizedCodeMap(*native_context);
    if (index > 0) {
      if (FLAG_trace_opt) {
        PrintF("[found optimized code for ");
        function->ShortPrint();
        PrintF("]\n");
      }
      // Caching of optimized code enabled and optimized code found.
      shared->InstallFromOptimizedCodeMap(*function, index);
      return true;
    }
  }
  return false;
}


bool Compiler::CompileLazy(CompilationInfo* info) {
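  // Compile the function held by |info| lazily, either with the full code
  // generator or, if |info| was set up for optimization, with Crankshaft.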
  Isolate* isolate = info->isolate();

  // The VM is in the COMPILER state until exiting this function.
  VMState<COMPILER> state(isolate);

  PostponeInterruptsScope postpone(isolate);

  Handle<SharedFunctionInfo> shared = info->shared_info();
  int compiled_size = shared->end_position() - shared->start_position();
  isolate->counters()->total_compile_size()->Increment(compiled_size);

  if (InstallCodeFromOptimizedCodeMap(info)) return true;

  // Generate the AST for the lazily compiled function.
  if (Parser::Parse(info)) {
    // Measure how long it takes to do the lazy compilation; only take the
    // rest of the function into account to avoid overlap with the lazy
    // parsing statistics.
    HistogramTimerScope timer(isolate->counters()->compile_lazy());

    // After parsing we know the function's language mode. Remember it.
    LanguageMode language_mode = info->function()->language_mode();
    info->SetLanguageMode(language_mode);
    shared->set_language_mode(language_mode);

    // Compile the code.
    if (!MakeCode(info)) {
      if (!isolate->has_pending_exception()) {
        isolate->StackOverflow();
      }
    } else {
      InstallCodeCommon(info);

      if (info->IsOptimizing()) {
        // Optimized code successfully created.
        Handle<Code> code = info->code();
        ASSERT(shared->scope_info() != ScopeInfo::Empty(isolate));
        // TODO(titzer): Only replace the code if it was not an OSR compile.
        info->closure()->ReplaceCode(*code);
        InsertCodeIntoOptimizedCodeMap(info);
        return true;
      } else if (!info->is_osr()) {
        // Compilation failed. Replace with full code if not OSR compile.
        return InstallFullCode(info);
      }
    }
  }

  ASSERT(info->code().is_null());
  return false;
}


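// Queues |closure| for optimized recompilation on the concurrent compiler
// thread. Parsing and graph creation still happen synchronously here; only
// the optimization phase itself runs concurrently.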
bool Compiler::RecompileConcurrent(Handle<JSFunction> closure,
                                   uint32_t osr_pc_offset) {
  bool compiling_for_osr = (osr_pc_offset != 0);

  Isolate* isolate = closure->GetIsolate();
  // Here we prepare compile data for the concurrent recompilation thread, but
  // this still happens synchronously and interrupts execution.
  Logger::TimerEventScope timer(
      isolate, Logger::TimerEventScope::v8_recompile_synchronous);

  if (!isolate->optimizing_compiler_thread()->IsQueueAvailable()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Compilation queue full, will retry optimizing ");
      closure->PrintName();
      PrintF(" on next run.\n");
    }
    return false;
  }

  SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(closure));
  Handle<SharedFunctionInfo> shared = info->shared_info();

  if (compiling_for_osr) {
    BailoutId osr_ast_id =
        shared->code()->TranslatePcOffsetToAstId(osr_pc_offset);
    ASSERT(!osr_ast_id.IsNone());
    info->SetOptimizing(osr_ast_id);
    info->set_osr_pc_offset(osr_pc_offset);

    if (FLAG_trace_osr) {
      PrintF("[COSR - attempt to queue ");
      closure->PrintName();
      PrintF(" at AST id %d]\n", osr_ast_id.ToInt());
    }
  } else {
    info->SetOptimizing(BailoutId::None());
  }

  VMState<COMPILER> state(isolate);
  PostponeInterruptsScope postpone(isolate);

  int compiled_size = shared->end_position() - shared->start_position();
  isolate->counters()->total_compile_size()->Increment(compiled_size);

  {
    CompilationHandleScope handle_scope(*info);

    if (!compiling_for_osr && InstallCodeFromOptimizedCodeMap(*info)) {
      return true;
    }

    if (Parser::Parse(*info)) {
      LanguageMode language_mode = info->function()->language_mode();
      info->SetLanguageMode(language_mode);
      shared->set_language_mode(language_mode);
      info->SaveHandles();

      if (Rewriter::Rewrite(*info) && Scope::Analyze(*info)) {
        OptimizingCompiler* compiler =
            new(info->zone()) OptimizingCompiler(*info);
        OptimizingCompiler::Status status = compiler->CreateGraph();
        if (status == OptimizingCompiler::SUCCEEDED) {
          info.Detach();
          shared->code()->set_profiler_ticks(0);
          isolate->optimizing_compiler_thread()->QueueForOptimization(compiler);
          ASSERT(!isolate->has_pending_exception());
          return true;
        } else if (status == OptimizingCompiler::BAILED_OUT) {
          isolate->clear_pending_exception();
          InstallFullCode(*info);
        }
      }
    }
  }

  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  return false;
}


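// Called back on the main thread once the concurrent compiler thread has
// finished a job: installs the optimized code if the job succeeded and falls
// back to the full (unoptimized) code otherwise.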
Handle<Code> Compiler::InstallOptimizedCode(
    OptimizingCompiler* optimizing_compiler) {
  SmartPointer<CompilationInfo> info(optimizing_compiler->info());
  // The function may have already been optimized by OSR.  Simply continue.
  // Except when OSR already disabled optimization for some reason.
  if (info->shared_info()->optimization_disabled()) {
    info->AbortOptimization();
    InstallFullCode(*info);
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** aborting optimization for ");
      info->closure()->PrintName();
      PrintF(" as it has been disabled.\n");
    }
    ASSERT(!info->closure()->IsInRecompileQueue());
    return Handle<Code>::null();
  }

  Isolate* isolate = info->isolate();
  VMState<COMPILER> state(isolate);
  Logger::TimerEventScope timer(
      isolate, Logger::TimerEventScope::v8_recompile_synchronous);
  // If crankshaft succeeded, install the optimized code else install
  // the unoptimized code.
  OptimizingCompiler::Status status = optimizing_compiler->last_status();
  if (info->HasAbortedDueToDependencyChange()) {
    info->set_bailout_reason(kBailedOutDueToDependencyChange);
    status = optimizing_compiler->AbortOptimization();
  } else if (status != OptimizingCompiler::SUCCEEDED) {
    info->set_bailout_reason(kFailedBailedOutLastTime);
    status = optimizing_compiler->AbortOptimization();
  } else if (isolate->DebuggerHasBreakPoints()) {
    info->set_bailout_reason(kDebuggerIsActive);
    status = optimizing_compiler->AbortOptimization();
  } else {
    status = optimizing_compiler->GenerateAndInstallCode();
    ASSERT(status == OptimizingCompiler::SUCCEEDED ||
           status == OptimizingCompiler::BAILED_OUT);
  }

  InstallCodeCommon(*info);
  if (status == OptimizingCompiler::SUCCEEDED) {
    Handle<Code> code = info->code();
    ASSERT(info->shared_info()->scope_info() != ScopeInfo::Empty(isolate));
    info->closure()->ReplaceCode(*code);
    if (info->shared_info()->SearchOptimizedCodeMap(
            info->closure()->context()->native_context()) == -1) {
      InsertCodeIntoOptimizedCodeMap(*info);
    }
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Optimized code for ");
      info->closure()->PrintName();
      PrintF(" installed.\n");
    }
  } else {
    info->AbortOptimization();
    InstallFullCode(*info);
  }
  // Optimized code is finally replacing unoptimized code.  Reset the latter's
  // profiler ticks to prevent re-optimizing too soon after a deopt.
  info->shared_info()->code()->set_profiler_ticks(0);
  ASSERT(!info->closure()->IsInRecompileQueue());
  return (status == OptimizingCompiler::SUCCEEDED) ? info->code()
                                                   : Handle<Code>::null();
}


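// Creates the SharedFunctionInfo for a function literal encountered while
// compiling an enclosing script. The body is usually left for lazy
// compilation unless the literal or the debugger requires eager compilation.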
Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(FunctionLiteral* literal,
                                                       Handle<Script> script) {
  // Precondition: code has been parsed and scopes have been analyzed.
  CompilationInfoWithZone info(script);
  info.SetFunction(literal);
  info.SetScope(literal->scope());
  info.SetLanguageMode(literal->scope()->language_mode());

  Isolate* isolate = info.isolate();
  Factory* factory = isolate->factory();
  LiveEditFunctionTracker live_edit_tracker(isolate, literal);
  // Determine if the function can be lazily compiled. This is necessary to
  // allow some of our builtin JS files to be lazily compiled. These
  // builtins cannot be handled lazily by the parser, since we have to know
  // if a function uses the special natives syntax, which is something the
  // parser records.
  // If the debugger requests compilation for break points, we cannot be
  // aggressive about lazy compilation, because it might trigger compilation
  // of functions without an outer context when setting a breakpoint through
  // Debug::FindSharedFunctionInfoInScript.
  bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
  bool allow_lazy = literal->AllowsLazyCompilation() &&
      !DebuggerWantsEagerCompilation(&info, allow_lazy_without_ctx);

  Handle<ScopeInfo> scope_info(ScopeInfo::Empty(isolate));

  // Generate code
  if (FLAG_lazy && allow_lazy && !literal->is_parenthesized()) {
    Handle<Code> code = isolate->builtins()->LazyCompile();
    info.SetCode(code);
  } else if (GenerateCode(&info)) {
    ASSERT(!info.code().is_null());
    scope_info = ScopeInfo::Create(info.scope(), info.zone());
  } else {
    return Handle<SharedFunctionInfo>::null();
  }

  // Create a shared function info object.
  Handle<SharedFunctionInfo> result =
      factory->NewSharedFunctionInfo(literal->name(),
                                     literal->materialized_literal_count(),
                                     literal->is_generator(),
                                     info.code(),
                                     scope_info);
  SetFunctionInfo(result, literal, false, script);
  RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
  result->set_allows_lazy_compilation(allow_lazy);
  result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);

  // Set the expected number of properties for instances and return
  // the resulting function.
  SetExpectedNofPropertiesFromEstimate(result,
                                       literal->expected_property_count());
  live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
  return result;
}


// Sets the function info on a function.
// The start_position points to the first '(' character after the function name
// in the full script source. When counting characters in the script source,
// the first character is number 0 (not 1).
void Compiler::SetFunctionInfo(Handle<SharedFunctionInfo> function_info,
                               FunctionLiteral* lit,
                               bool is_toplevel,
                               Handle<Script> script) {
  function_info->set_length(lit->parameter_count());
  function_info->set_formal_parameter_count(lit->parameter_count());
  function_info->set_script(*script);
  function_info->set_function_token_position(lit->function_token_position());
  function_info->set_start_position(lit->start_position());
  function_info->set_end_position(lit->end_position());
  function_info->set_is_expression(lit->is_expression());
  function_info->set_is_anonymous(lit->is_anonymous());
  function_info->set_is_toplevel(is_toplevel);
  function_info->set_inferred_name(*lit->inferred_name());
  function_info->set_allows_lazy_compilation(lit->AllowsLazyCompilation());
  function_info->set_allows_lazy_compilation_without_context(
      lit->AllowsLazyCompilationWithoutContext());
  function_info->set_language_mode(lit->language_mode());
  function_info->set_uses_arguments(lit->scope()->arguments() != NULL);
  function_info->set_has_duplicate_parameters(lit->has_duplicate_parameters());
  function_info->set_ast_node_count(lit->ast_node_count());
  function_info->set_is_function(lit->is_function());
  function_info->set_dont_optimize_reason(lit->dont_optimize_reason());
  function_info->set_dont_inline(lit->flags()->Contains(kDontInline));
  function_info->set_dont_cache(lit->flags()->Contains(kDontCache));
  function_info->set_is_generator(lit->is_generator());
}


void Compiler::RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                         CompilationInfo* info,
                                         Handle<SharedFunctionInfo> shared) {
  // SharedFunctionInfo is passed separately, because if CompilationInfo
  // was created using Script object, it will not have it.

  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
      info->isolate()->cpu_profiler()->is_profiling()) {
    Handle<Script> script = info->script();
    Handle<Code> code = info->code();
    if (*code == info->isolate()->builtins()->builtin(Builtins::kLazyCompile))
      return;
    int line_num = GetScriptLineNumber(script, shared->start_position()) + 1;
    int column_num =
        GetScriptColumnNumber(script, shared->start_position()) + 1;
    USE(line_num);
    if (script->name()->IsString()) {
      PROFILE(info->isolate(),
              CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
                              *code,
                              *shared,
                              info,
                              String::cast(script->name()),
                              line_num,
                              column_num));
    } else {
      PROFILE(info->isolate(),
              CodeCreateEvent(Logger::ToNativeByScript(tag, *script),
                              *code,
                              *shared,
                              info,
                              info->isolate()->heap()->empty_string(),
                              line_num,
                              column_num));
    }
  }

  GDBJIT(AddCode(Handle<String>(shared->DebugName()),
                 Handle<Script>(info->script()),
                 Handle<Code>(info->code()),
                 info));
}


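// RAII helper that records the time and zone memory spent in a single
// compilation phase when --hydrogen-stats is enabled.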
CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
    : name_(name), info_(info), zone_(info->isolate()) {
  if (FLAG_hydrogen_stats) {
    info_zone_start_allocation_size_ = info->zone()->allocation_size();
    timer_.Start();
  }
}


CompilationPhase::~CompilationPhase() {
  if (FLAG_hydrogen_stats) {
    unsigned size = zone()->allocation_size();
    size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
  }
}


bool CompilationPhase::ShouldProduceTraceOutput() const {
  // Trace if the appropriate trace flag is set and the phase name's first
  // character is in the FLAG_trace_phase command line parameter.
  AllowHandleDereference allow_deref;
  bool tracing_on = info()->IsStub()
      ? FLAG_trace_hydrogen_stubs
      : (FLAG_trace_hydrogen &&
         info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
  return (tracing_on &&
      OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
}

} }  // namespace v8::internal