compiler.cc 57.6 KB
Newer Older
1
// Copyright 2012 the V8 project authors. All rights reserved.
2 3
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4

5
#include "src/compiler.h"
6

7 8
#include <algorithm>

9
#include "src/ast-numbering.h"
10 11 12
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/compilation-cache.h"
13
#include "src/compiler/pipeline.h"
14 15 16 17 18 19 20 21
#include "src/cpu-profiler.h"
#include "src/debug.h"
#include "src/deoptimizer.h"
#include "src/full-codegen.h"
#include "src/gdb-jit.h"
#include "src/hydrogen.h"
#include "src/lithium.h"
#include "src/liveedit.h"
22
#include "src/messages.h"
23
#include "src/parser.h"
24
#include "src/prettyprinter.h"
25 26 27 28 29
#include "src/rewriter.h"
#include "src/runtime-profiler.h"
#include "src/scanner-character-streams.h"
#include "src/scopeinfo.h"
#include "src/scopes.h"
30
#include "src/snapshot/serialize.h"
31
#include "src/typing.h"
32
#include "src/vm-state-inl.h"
33

34 35
namespace v8 {
namespace internal {
36

37 38 39 40 41 42 43 44 45 46 47
// Streams a debug representation of a SourcePosition: "<?>" when unknown,
// "<inlining_id:position>" when --hydrogen-track-positions is on, and
// "<0:raw>" otherwise.
std::ostream& operator<<(std::ostream& os, const SourcePosition& p) {
  if (p.IsUnknown()) return os << "<?>";
  if (FLAG_hydrogen_track_positions) {
    os << "<" << p.inlining_id() << ":" << p.position() << ">";
    return os;
  }
  os << "<0:" << p.raw() << ">";
  return os;
}


48 49 50 51
// Defines a CompilationInfo getter that forwards to the attached ParseInfo.
// The ParseInfo must be present (CHECKed at runtime).
#define PARSE_INFO_GETTER(type, name)  \
  type CompilationInfo::name() const { \
    CHECK(parse_info());               \
    return parse_info()->name();       \
  }


// Like PARSE_INFO_GETTER, but tolerates a missing ParseInfo (e.g. for code
// stub compilations) by returning the supplied default value instead.
#define PARSE_INFO_GETTER_WITH_DEFAULT(type, name, def) \
  type CompilationInfo::name() const {                  \
    return parse_info() ? parse_info()->name() : def;   \
  }


PARSE_INFO_GETTER(Handle<Script>, script)
PARSE_INFO_GETTER(bool, is_eval)
PARSE_INFO_GETTER(bool, is_native)
PARSE_INFO_GETTER(bool, is_module)
PARSE_INFO_GETTER_WITH_DEFAULT(LanguageMode, language_mode, STRICT)
PARSE_INFO_GETTER_WITH_DEFAULT(Handle<JSFunction>, closure,
                               Handle<JSFunction>::null())
PARSE_INFO_GETTER(FunctionLiteral*, function)
PARSE_INFO_GETTER_WITH_DEFAULT(Scope*, scope, nullptr)
PARSE_INFO_GETTER(Handle<Context>, context)
PARSE_INFO_GETTER(Handle<SharedFunctionInfo>, shared_info)

#undef PARSE_INFO_GETTER
#undef PARSE_INFO_GETTER_WITH_DEFAULT


77 78 79 80 81 82 83 84 85 86 87 88
// Exactly like a CompilationInfo, except being allocated via {new} and it also
// creates and enters a Zone on construction and deallocates it on destruction.
class CompilationInfoWithZone : public CompilationInfo {
 public:
  explicit CompilationInfoWithZone(Handle<JSFunction> function)
      : CompilationInfo(new ParseInfo(&zone_, function)) {}

  // Virtual destructor because a CompilationInfoWithZone has to exit the
  // zone scope and get rid of dependent maps even when the destructor is
  // called when cast as a CompilationInfo.
  virtual ~CompilationInfoWithZone() {
    DisableFutureOptimization();
    // Undo any uncommitted code dependencies before the backing zone and
    // the ParseInfo go away.
    dependencies()->Rollback();
    // The ParseInfo was allocated with {new} in the constructor above and
    // is owned exclusively by this object.
    delete parse_info_;
    parse_info_ = nullptr;
  }

 private:
  Zone zone_;
};


99 100
// True when a ParseInfo is attached and it carries a SharedFunctionInfo.
bool CompilationInfo::has_shared_info() const {
  if (parse_info_ == nullptr) return false;
  return !parse_info_->shared_info().is_null();
}


104
// Constructs a CompilationInfo for a regular (BASE mode) compilation driven
// by |parse_info|, deriving initial compilation flags from the isolate state
// and the relevant command-line flags.
CompilationInfo::CompilationInfo(ParseInfo* parse_info)
    : CompilationInfo(parse_info, nullptr, BASE, parse_info->isolate(),
                      parse_info->zone()) {
  // Compiling for the snapshot typically results in different code than
  // compiling later on. This means that code recompiled with deoptimization
  // support won't be "equivalent" (as defined by SharedFunctionInfo::
  // EnableDeoptimizationSupport), so it will replace the old code and all
  // its type feedback. To avoid this, always compile functions in the snapshot
  // with deoptimization support.
  if (isolate_->serializer_enabled()) EnableDeoptimizationSupport();

  // Translate the current isolate/flag configuration into per-compilation
  // flag bits.
  if (isolate_->debug()->is_active()) MarkAsDebug();
  if (FLAG_context_specialization) MarkAsContextSpecializing();
  if (FLAG_turbo_inlining) MarkAsInliningEnabled();
  if (FLAG_turbo_source_positions) MarkAsSourcePositionsEnabled();
  if (FLAG_turbo_splitting) MarkAsSplittingEnabled();
  if (FLAG_turbo_types) MarkAsTypingEnabled();

  if (has_shared_info() && shared_info()->is_compiled()) {
    // We should initialize the CompilationInfo feedback vector from the
    // passed in shared info, rather than creating a new one.
    feedback_vector_ = Handle<TypeFeedbackVector>(
        shared_info()->feedback_vector(), parse_info->isolate());
  }
}


131 132 133 134 135 136 137 138 139 140 141 142 143 144
// Constructs a CompilationInfo for compiling a code stub (STUB mode). No
// ParseInfo is attached in this case.
CompilationInfo::CompilationInfo(CodeStub* stub, Isolate* isolate, Zone* zone)
    : CompilationInfo(nullptr, stub, STUB, isolate, zone) {}


// Delegated-to constructor that initializes every member. Keep the
// initializer list in sync with the member declaration order in the header.
CompilationInfo::CompilationInfo(ParseInfo* parse_info, CodeStub* code_stub,
                                 Mode mode, Isolate* isolate, Zone* zone)
    : parse_info_(parse_info),
      isolate_(isolate),
      flags_(0),
      code_stub_(code_stub),
      mode_(mode),
      osr_ast_id_(BailoutId::None()),
      zone_(zone),
      deferred_handles_(nullptr),
      dependencies_(isolate, zone),
      bailout_reason_(kNoReason),
      prologue_offset_(Code::kPrologueOffsetNotSet),
      // Frame ranges are only collected while the CPU profiler is running.
      no_frame_ranges_(isolate->cpu_profiler()->is_profiling()
                           ? new List<OffsetRange>(2)
                           : nullptr),
      track_positions_(FLAG_hydrogen_track_positions ||
                       isolate->cpu_profiler()->is_profiling()),
      // Mirror the optimization counter of the shared info, when present.
      opt_count_(has_shared_info() ? shared_info()->opt_count() : 0),
      parameter_count_(0),
      optimization_id_(-1),
      osr_expr_stack_height_(0) {}
157 158


159
CompilationInfo::~CompilationInfo() {
  // Flush any deferred "disable future optimization" decision before the
  // info is torn down.
  DisableFutureOptimization();
  delete deferred_handles_;
  delete no_frame_ranges_;
#ifdef DEBUG
  // Check that no dependent maps have been added or added dependent maps have
  // been rolled back or committed.
  DCHECK(dependencies()->IsEmpty());
#endif  // DEBUG
}


171
int CompilationInfo::num_parameters() const {
172
  return has_scope() ? scope()->num_parameters() : parameter_count_;
173 174 175
}


176 177 178 179 180 181 182 183
// Parameter count plus one for the implicit receiver, when it exists.
int CompilationInfo::num_parameters_including_this() const {
  const int receiver_slot = is_this_defined() ? 1 : 0;
  return num_parameters() + receiver_slot;
}


// The implicit |this| parameter exists for everything except code stubs.
bool CompilationInfo::is_this_defined() const {
  if (IsStub()) return false;
  return true;
}


184
int CompilationInfo::num_heap_slots() const {
185
  return has_scope() ? scope()->num_heap_slots() : 0;
186 187 188 189
}


// Code flags for the output object: derived from the stub when compiling a
// code stub, otherwise the flags of an optimized function.
Code::Flags CompilationInfo::flags() const {
  if (code_stub() != nullptr) {
    return Code::ComputeFlags(
        code_stub()->GetCodeKind(), code_stub()->GetICState(),
        code_stub()->GetExtraICState(), code_stub()->GetStubType());
  }
  return Code::ComputeFlags(Code::OPTIMIZED_FUNCTION);
}


198 199 200 201
// Primitive functions are unlikely to be picked up by the stack-walking
// profiler, so they trigger their own optimization when they're called
// for the SharedFunctionInfo::kCallsUntilPrimitiveOptimization-th time.
bool CompilationInfo::ShouldSelfOptimize() {
  // All of the following must hold: Crankshaft is enabled, the function is
  // not flagged against self-optimization or optimization in general, lazy
  // compilation is allowed, and optimization has not been disabled on the
  // shared function info (when one exists).
  return FLAG_crankshaft && !function()->flags()->Contains(kDontSelfOptimize) &&
         !function()->dont_optimize() &&
         function()->scope()->AllowsLazyCompilation() &&
         (!has_shared_info() || !shared_info()->optimization_disabled());
}

208

209
// Makes sure this compilation has a type feedback vector matching the
// function's current feedback vector spec, allocating a fresh vector when
// none exists or the spec has diverged.
void CompilationInfo::EnsureFeedbackVector() {
  if (feedback_vector_.is_null() ||
      feedback_vector_->SpecDiffersFrom(function()->feedback_vector_spec())) {
    feedback_vector_ = isolate()->factory()->NewTypeFeedbackVector(
        function()->feedback_vector_spec());
  }
}


218
bool CompilationInfo::is_simple_parameter_list() {
219
  return scope()->is_simple_parameter_list();
220
}
221 222


223
bool CompilationInfo::MayUseThis() const {
224
  return scope()->has_this_declaration() && scope()->receiver()->is_used();
225 226 227
}


228
// Registers |shared| (inlined at |position| into the function identified by
// |parent_id|) in inlined_function_infos_ and returns the id assigned to it.
// Under --hydrogen-track-positions, also dumps the inlinee's source text and
// an INLINE trace line to the code tracer.
int CompilationInfo::TraceInlinedFunction(Handle<SharedFunctionInfo> shared,
                                          SourcePosition position,
                                          int parent_id) {
  DCHECK(track_positions_);

  // The new entry's id is simply its index in inlined_function_infos_.
  int inline_id = static_cast<int>(inlined_function_infos_.size());
  InlinedFunctionInfo info(parent_id, position, UnboundScript::kNoScriptId,
      shared->start_position());
  if (!shared->script()->IsUndefined()) {
    Handle<Script> script(Script::cast(shared->script()));
    info.script_id = script->id()->value();

    if (FLAG_hydrogen_track_positions && !script->source()->IsUndefined()) {
      CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
      OFStream os(tracing_scope.file());
      os << "--- FUNCTION SOURCE (" << shared->DebugName()->ToCString().get()
         << ") id{" << optimization_id() << "," << inline_id << "} ---\n";
      {
        DisallowHeapAllocation no_allocation;
        int start = shared->start_position();
        int len = shared->end_position() - start;
        // Emit the function's source, escaped so the dump stays reversible
        // (see AsReversiblyEscapedUC16).
        String::SubStringRange source(String::cast(script->source()), start,
                                      len);
        for (const auto& c : source) {
          os << AsReversiblyEscapedUC16(c);
        }
      }

      os << "\n--- END ---\n";
    }
  }

  inlined_function_infos_.push_back(info);

  // No INLINE trace line for entry 0 — it has no inlining parent
  // (presumably the outermost function itself).
  if (FLAG_hydrogen_track_positions && inline_id != 0) {
    CodeTracer::Scope tracing_scope(isolate()->GetCodeTracer());
    OFStream os(tracing_scope.file());
    os << "INLINE (" << shared->DebugName()->ToCString().get() << ") id{"
       << optimization_id() << "," << inline_id << "} AS " << inline_id
       << " AT " << position << std::endl;
  }

  return inline_id;
}
272 273


274 275
void CompilationInfo::LogDeoptCallPosition(int pc_offset, int inlining_id) {
  if (!track_positions_ || IsStub()) return;
276 277
  DCHECK_LT(static_cast<size_t>(inlining_id), inlined_function_infos_.size());
  inlined_function_infos_.at(inlining_id).deopt_pc_offsets.push_back(pc_offset);
278 279 280
}


281 282 283 284 285 286 287 288
// Compiles a code stub through a reduced parse/analyze/TurboFan pipeline.
// All phases are expected to succeed (CHECKed).
Handle<Code> CompilationInfo::GenerateCodeStub() {
  // Run a "mini pipeline", extracted from compiler.cc.
  CHECK(Parser::ParseStatic(parse_info()));
  CHECK(Compiler::Analyze(parse_info()));
  return compiler::Pipeline(this).GenerateCode();
}


289
// An HOptimizedGraphBuilder that keeps the builder's current source position
// in sync with the AST node being visited, so generated instructions carry
// accurate positions.
class HOptimizedGraphBuilderWithPositions: public HOptimizedGraphBuilder {
 public:
  explicit HOptimizedGraphBuilderWithPositions(CompilationInfo* info)
      : HOptimizedGraphBuilder(info) {
  }

  // Expression visitors: install the node's position for the duration of
  // the visit and restore the previous position afterwards.
#define DEF_VISIT(type)                                      \
  void Visit##type(type* node) override {                    \
    SourcePosition old_position = SourcePosition::Unknown(); \
    if (node->position() != RelocInfo::kNoPosition) {        \
      old_position = source_position();                      \
      SetSourcePosition(node->position());                   \
    }                                                        \
    HOptimizedGraphBuilder::Visit##type(node);               \
    if (!old_position.IsUnknown()) {                         \
      set_source_position(old_position);                     \
    }                                                        \
  }
  EXPRESSION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Statement visitors: same save/set/restore bookkeeping as above.
#define DEF_VISIT(type)                                      \
  void Visit##type(type* node) override {                    \
    SourcePosition old_position = SourcePosition::Unknown(); \
    if (node->position() != RelocInfo::kNoPosition) {        \
      old_position = source_position();                      \
      SetSourcePosition(node->position());                   \
    }                                                        \
    HOptimizedGraphBuilder::Visit##type(node);               \
    if (!old_position.IsUnknown()) {                         \
      set_source_position(old_position);                     \
    }                                                        \
  }
  STATEMENT_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT

  // Declaration visitors: no position bookkeeping, just forward.
#define DEF_VISIT(type)                        \
  void Visit##type(type* node) override {      \
    HOptimizedGraphBuilder::Visit##type(node); \
  }
  DECLARATION_NODE_LIST(DEF_VISIT)
#undef DEF_VISIT
};


334
// Phase 1 of an optimized compile job: performs eligibility checks, ensures
// fullcode with deoptimization support exists, then either runs the TurboFan
// pipeline to completion or builds a hydrogen graph for Crankshaft.
// Returns SUCCEEDED/BAILED_OUT/FAILED, or a retry status via
// RetryOptimization.
OptimizedCompileJob::Status OptimizedCompileJob::CreateGraph() {
  DCHECK(info()->IsOptimizing());
  DCHECK(!info()->IsCompilingForDebugging());

  // Do not use Crankshaft/TurboFan if we need to be able to set break points.
  if (isolate()->debug()->has_break_points()) {
    return RetryOptimization(kDebuggerHasBreakPoints);
  }

  // Limit the number of times we try to optimize functions.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info()->opt_count() > kMaxOptCount) {
    return AbortOptimization(kOptimizedTooManyTimes);
  }

  // Check the whitelist for Crankshaft.
  if (!info()->closure()->PassesFilter(FLAG_hydrogen_filter)) {
    return AbortOptimization(kHydrogenFilter);
  }

  // Optimization requires a version of fullcode with deoptimization support.
  // Recompile the unoptimized version of the code if the current version
  // doesn't have deoptimization support already.
  // Otherwise, if we are gathering compilation time and space statistics
  // for hydrogen, gather baseline statistics for a fullcode compilation.
  bool should_recompile = !info()->shared_info()->has_deoptimization_support();
  if (should_recompile || FLAG_hydrogen_stats) {
    base::ElapsedTimer timer;
    if (FLAG_hydrogen_stats) {
      timer.Start();
    }
    if (!Compiler::EnsureDeoptimizationSupport(info())) {
      return SetLastStatus(FAILED);
    }
    if (FLAG_hydrogen_stats) {
      isolate()->GetHStatistics()->IncrementFullCodeGen(timer.Elapsed());
    }
  }

  DCHECK(info()->shared_info()->has_deoptimization_support());

  // Check the enabling conditions for TurboFan.
  bool dont_crankshaft = info()->shared_info()->dont_crankshaft();
  if (((FLAG_turbo_asm && info()->shared_info()->asm_function()) ||
       (dont_crankshaft && strcmp(FLAG_turbo_filter, "~~") == 0) ||
       info()->closure()->PassesFilter(FLAG_turbo_filter)) &&
      (FLAG_turbo_osr || !info()->is_osr())) {
    // Use TurboFan for the compilation.
    if (FLAG_trace_opt) {
      OFStream os(stdout);
      os << "[compiling method " << Brief(*info()->closure())
         << " using TurboFan";
      if (info()->is_osr()) os << " OSR";
      os << "]" << std::endl;
    }

    // asm functions are context-specialized; otherwise type feedback may be
    // enabled via --turbo-type-feedback.
    if (info()->shared_info()->asm_function()) {
      info()->MarkAsContextSpecializing();
    } else if (FLAG_turbo_type_feedback) {
      info()->MarkAsTypeFeedbackEnabled();
      info()->EnsureFeedbackVector();
    }
    if (!info()->shared_info()->asm_function() ||
        FLAG_turbo_asm_deoptimization) {
      info()->MarkAsDeoptimizationEnabled();
    }

    Timer t(this, &time_taken_to_create_graph_);
    compiler::Pipeline pipeline(info());
    pipeline.GenerateCode();
    if (!info()->code().is_null()) {
      return SetLastStatus(SUCCEEDED);
    }
    // TurboFan produced no code; fall through to try Crankshaft below.
  }

  if (!isolate()->use_crankshaft() || dont_crankshaft) {
    // Crankshaft is entirely disabled.
    return SetLastStatus(FAILED);
  }

  Scope* scope = info()->scope();
  if (LUnallocated::TooManyParameters(scope->num_parameters())) {
    // Crankshaft would require too many Lithium operands.
    return AbortOptimization(kTooManyParameters);
  }

  if (info()->is_osr() &&
      LUnallocated::TooManyParametersOrStackSlots(scope->num_parameters(),
                                                  scope->num_stack_slots())) {
    // Crankshaft would require too many Lithium operands.
    return AbortOptimization(kTooManyParametersLocals);
  }

  if (scope->HasIllegalRedeclaration()) {
    // Crankshaft cannot handle illegal redeclarations.
    return AbortOptimization(kFunctionWithIllegalRedeclaration);
  }

  if (FLAG_trace_opt) {
    OFStream os(stdout);
    os << "[compiling method " << Brief(*info()->closure())
       << " using Crankshaft";
    if (info()->is_osr()) os << " OSR";
    os << "]" << std::endl;
  }

  if (FLAG_trace_hydrogen) {
    isolate()->GetHTracer()->TraceCompilation(info());
  }

  // Type-check the function.
  AstTyper::Run(info());

  // Optimization could have been disabled by the parser. Note that this check
  // is only needed because the Hydrogen graph builder is missing some bailouts.
  if (info()->shared_info()->optimization_disabled()) {
    return AbortOptimization(
        info()->shared_info()->disable_optimization_reason());
  }

  // Use the position-tracking graph builder when positions are wanted.
  graph_builder_ = (info()->is_tracking_positions() || FLAG_trace_ic)
                       ? new (info()->zone())
                             HOptimizedGraphBuilderWithPositions(info())
                       : new (info()->zone()) HOptimizedGraphBuilder(info());

  Timer t(this, &time_taken_to_create_graph_);
  graph_ = graph_builder_->CreateGraph();

  if (isolate()->has_pending_exception()) {
    return SetLastStatus(FAILED);
  }

  if (graph_ == NULL) return SetLastStatus(BAILED_OUT);

  if (info()->dependencies()->HasAborted()) {
    // Dependency has changed during graph creation. Let's try again later.
    return RetryOptimization(kBailedOutDueToDependencyChange);
  }

  return SetLastStatus(SUCCEEDED);
}

477

478
// Phase 2: optimizes the hydrogen graph built by CreateGraph and produces a
// Lithium chunk. Runs with heap allocation, handle creation/dereference,
// and code-dependency changes disallowed.
OptimizedCompileJob::Status OptimizedCompileJob::OptimizeGraph() {
  DisallowHeapAllocation no_allocation;
  DisallowHandleAllocation no_handles;
  DisallowHandleDereference no_deref;
  DisallowCodeDependencyChange no_dependency_change;

  DCHECK(last_status() == SUCCEEDED);
  // TODO(turbofan): Currently everything is done in the first phase.
  if (!info()->code().is_null()) {
    return last_status();
  }

  Timer t(this, &time_taken_to_optimize_);
  DCHECK(graph_ != NULL);
  BailoutReason bailout_reason = kNoReason;

  if (graph_->Optimize(&bailout_reason)) {
    chunk_ = LChunk::NewChunk(graph_);
    if (chunk_ != NULL) return SetLastStatus(SUCCEEDED);
  } else if (bailout_reason != kNoReason) {
    // Record the concrete bailout reason via the graph builder.
    graph_builder_->Bailout(bailout_reason);
  }

  // Either graph optimization or chunk building failed.
  return SetLastStatus(BAILED_OUT);
}


505
// Phase 3: generates machine code from the Lithium chunk. When code already
// exists (TurboFan finished in phase 1), only commits dependencies, registers
// the code with the native context if deoptimization is enabled, and records
// stats.
OptimizedCompileJob::Status OptimizedCompileJob::GenerateCode() {
  DCHECK(last_status() == SUCCEEDED);
  // TODO(turbofan): Currently everything is done in the first phase.
  if (!info()->code().is_null()) {
    info()->dependencies()->Commit(info()->code());
    if (info()->is_deoptimization_enabled()) {
      info()->parse_info()->context()->native_context()->AddOptimizedCode(
          *info()->code());
    }
    RecordOptimizationStats();
    return last_status();
  }

  DCHECK(!info()->dependencies()->HasAborted());
  DisallowCodeDependencyChange no_dependency_change;
  DisallowJavascriptExecution no_js(isolate());
  {  // Scope for timer.
    Timer timer(this, &time_taken_to_codegen_);
    DCHECK(chunk_ != NULL);
    DCHECK(graph_ != NULL);
    // Deferred handles reference objects that were accessible during
    // graph creation.  To make sure that we don't encounter inconsistencies
    // between graph creation and code generation, we disallow accessing
    // objects through deferred handles during the latter, with exceptions.
    DisallowDeferredHandleDereference no_deferred_handle_deref;
    Handle<Code> optimized_code = chunk_->Codegen();
    if (optimized_code.is_null()) {
      // Distinguish a plain codegen failure from a recorded bailout.
      if (info()->bailout_reason() == kNoReason) {
        return AbortOptimization(kCodeGenerationFailed);
      }
      return SetLastStatus(BAILED_OUT);
    }
    info()->SetCode(optimized_code);
  }
  RecordOptimizationStats();
  // Add to the weak list of optimized code objects.
  info()->context()->native_context()->AddOptimizedCode(*info()->code());
  return SetLastStatus(SUCCEEDED);
}


546 547 548 549 550 551
// Bumps the function's optimization counter and emits per-job and cumulative
// timing/size statistics, depending on tracing flags.
void OptimizedCompileJob::RecordOptimizationStats() {
  Handle<JSFunction> function = info()->closure();
  if (!function->IsOptimized()) {
    // Concurrent recompilation and OSR may race.  Increment only once.
    int opt_count = function->shared()->opt_count();
    function->shared()->set_opt_count(opt_count + 1);
  }
  double ms_creategraph = time_taken_to_create_graph_.InMillisecondsF();
  double ms_optimize = time_taken_to_optimize_.InMillisecondsF();
  double ms_codegen = time_taken_to_codegen_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    // Cumulative totals across all optimizations in this process.
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions,
           code_size,
           compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_create_graph_,
                                                    time_taken_to_optimize_,
                                                    time_taken_to_codegen_);
  }
}


583 584 585 586 587 588 589 590 591 592
// Seeds the expected property count of |shared| from the compiler's
// estimate, padded with a heuristic slack amount.
void SetExpectedNofPropertiesFromEstimate(Handle<SharedFunctionInfo> shared,
                                          int estimate) {
  // A constructor that adds no properties itself is still likely to gain
  // some later on; assume a couple.
  if (estimate == 0) estimate = 2;

  // TODO(yangguo): check whether those heuristics are still up-to-date.
  // Objects going into a snapshot are not shrunk (yet), so pad them only
  // conservatively; otherwise inobject slack tracking reclaims redundant
  // inobject space later, and we can afford to pad generously.
  const int slack = shared->GetIsolate()->serializer_enabled() ? 2 : 8;

  shared->set_expected_nof_properties(estimate + slack);
}


605 606 607 608 609 610 611 612
// Disables optimization on |shared_info| when a concrete bailout reason is
// given; kNoReason leaves the function untouched.
static void MaybeDisableOptimization(Handle<SharedFunctionInfo> shared_info,
                                     BailoutReason bailout_reason) {
  if (bailout_reason == kNoReason) return;
  shared_info->DisableOptimization(bailout_reason);
}


613 614 615 616 617 618 619 620 621 622 623
// Emits a code-creation event (with script name and line/column) for the
// just-compiled function, when logging or profiling is active.
static void RecordFunctionCompilation(Logger::LogEventsAndTags tag,
                                      CompilationInfo* info,
                                      Handle<SharedFunctionInfo> shared) {
  // SharedFunctionInfo is passed separately, because if CompilationInfo
  // was created using Script object, it will not have it.

  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
      info->isolate()->cpu_profiler()->is_profiling()) {
    Handle<Script> script = info->parse_info()->script();
    Handle<Code> code = info->code();
    // The CompileLazy builtin is a placeholder, not real compiled code.
    if (code.is_identical_to(info->isolate()->builtins()->CompileLazy())) {
      return;
    }
    int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
    int column_num =
        Script::GetColumnNumber(script, shared->start_position()) + 1;
    String* script_name = script->name()->IsString()
                              ? String::cast(script->name())
                              : info->isolate()->heap()->empty_string();
    Logger::LogEventsAndTags log_tag = Logger::ToNativeByScript(tag, *script);
    PROFILE(info->isolate(),
            CodeCreateEvent(log_tag, *code, *shared, info, script_name,
                            line_num, column_num));
  }
}


643
// Runs analysis and full-codegen over an already-parsed function. On failure
// guarantees a pending exception is set (stack overflow by default).
static bool CompileUnoptimizedCode(CompilationInfo* info) {
  DCHECK(AllowCompilation::IsAllowed(info->isolate()));
  if (!Compiler::Analyze(info->parse_info()) ||
      !FullCodeGenerator::MakeCode(info)) {
    Isolate* isolate = info->isolate();
    // Report a stack overflow if nothing more specific is pending already.
    if (!isolate->has_pending_exception()) isolate->StackOverflow();
    return false;
  }
  return true;
}
653

654

655 656
// Parses and compiles unoptimized (full-codegen) code for the function in
// |info|, updating its SharedFunctionInfo (language mode, property estimate,
// scope info, code, feedback vector) along the way. Returns an empty
// MaybeHandle on failure.
MUST_USE_RESULT static MaybeHandle<Code> GetUnoptimizedCodeCommon(
    CompilationInfo* info) {
  VMState<COMPILER> state(info->isolate());
  PostponeInterruptsScope postpone(info->isolate());

  // Parse and update CompilationInfo with the results.
  if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>();
  Handle<SharedFunctionInfo> shared = info->shared_info();
  FunctionLiteral* lit = info->function();
  shared->set_language_mode(lit->language_mode());
  SetExpectedNofPropertiesFromEstimate(shared, lit->expected_property_count());
  MaybeDisableOptimization(shared, lit->dont_optimize_reason());

  // Compile unoptimized code.
  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();

  CHECK_EQ(Code::FUNCTION, info->code()->kind());
  RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info, shared);

  // Update the shared function info with the scope info. Allocating the
  // ScopeInfo object may cause a GC.
  Handle<ScopeInfo> scope_info =
      ScopeInfo::Create(info->isolate(), info->zone(), info->scope());
  shared->set_scope_info(*scope_info);

  // Update the code and feedback vector for the shared function info.
  shared->ReplaceCode(*info->code());
  shared->set_feedback_vector(*info->feedback_vector());

  return info->code();
}
686 687


688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735
// Looks up previously cached optimized code for |function| (at |osr_ast_id|,
// or BailoutId::None for regular entry) in the shared info's optimized code
// map. Also restores the cached literals on the function, when present.
// Returns an empty MaybeHandle on a cache miss or when caching is disabled.
MUST_USE_RESULT static MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
    Handle<JSFunction> function, BailoutId osr_ast_id) {
  if (FLAG_cache_optimized_code) {
    Handle<SharedFunctionInfo> shared(function->shared());
    // Bound functions are not cached.
    if (shared->bound()) return MaybeHandle<Code>();
    DisallowHeapAllocation no_gc;
    int index = shared->SearchOptimizedCodeMap(
        function->context()->native_context(), osr_ast_id);
    if (index > 0) {
      if (FLAG_trace_opt) {
        PrintF("[found optimized code for ");
        function->ShortPrint();
        if (!osr_ast_id.IsNone()) {
          PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
        }
        PrintF("]\n");
      }
      FixedArray* literals = shared->GetLiteralsFromOptimizedCodeMap(index);
      if (literals != NULL) function->set_literals(literals);
      return Handle<Code>(shared->GetCodeFromOptimizedCodeMap(index));
    }
  }
  return MaybeHandle<Code>();
}


// Caches the freshly compiled optimized code (plus the function's literals)
// in the shared info's optimized code map, keyed by native context and OSR
// AST id. No-ops for non-optimized code, context-specialized TurboFan code,
// bound functions, or when caching is disabled.
static void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
  Handle<Code> code = info->code();
  if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.

  // Context specialization folds-in the context, so no sharing can occur.
  if (code->is_turbofanned() && info->is_context_specializing()) return;

  // Cache optimized code.
  if (FLAG_cache_optimized_code) {
    Handle<JSFunction> function = info->closure();
    Handle<SharedFunctionInfo> shared(function->shared());
    // Do not cache bound functions.
    if (shared->bound()) return;
    Handle<FixedArray> literals(function->literals());
    Handle<Context> native_context(function->context()->native_context());
    SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
                                              literals, info->osr_ast_id());
  }
}


736 737 738
// Runs AST numbering on the parsed function and, when a shared function
// info is attached, copies the resulting per-function flags (node count,
// dont_optimize reason, dont_crankshaft, dont_cache) onto it.
static bool Renumber(ParseInfo* parse_info) {
  if (!AstNumbering::Renumber(parse_info->isolate(), parse_info->zone(),
                              parse_info->function())) {
    return false;
  }
  Handle<SharedFunctionInfo> shared_info = parse_info->shared_info();
  if (!shared_info.is_null()) {
    FunctionLiteral* lit = parse_info->function();
    shared_info->set_ast_node_count(lit->ast_node_count());
    MaybeDisableOptimization(shared_info, lit->dont_optimize_reason());
    shared_info->set_dont_crankshaft(lit->flags()->Contains(kDontCrankshaft));
    shared_info->set_dont_cache(lit->flags()->Contains(kDontCache));
  }
  return true;
}


753
// Runs the post-parse analysis pipeline — rewriting, scope analysis, AST
// numbering — in that order over an already-parsed function. Returns false
// as soon as any phase fails.
bool Compiler::Analyze(ParseInfo* info) {
  DCHECK(info->function() != NULL);
  const bool ok =
      Rewriter::Rewrite(info) && Scope::Analyze(info) && Renumber(info);
  if (!ok) return false;
  DCHECK(info->scope() != NULL);
  return true;
}


763
// Parses the function and then runs the full analysis pipeline on it.
bool Compiler::ParseAndAnalyze(ParseInfo* info) {
  return Parser::ParseStatic(info) && Compiler::Analyze(info);
}


769
// Runs the entire optimization pipeline synchronously on the current thread:
// parse/analyze, then all three OptimizedCompileJob phases. On success the
// result is cached in the optimized code map and the compilation is logged.
static bool GetOptimizedCodeNow(CompilationInfo* info) {
  if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;

  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());

  OptimizedCompileJob job(info);
  if (job.CreateGraph() != OptimizedCompileJob::SUCCEEDED ||
      job.OptimizeGraph() != OptimizedCompileJob::SUCCEEDED ||
      job.GenerateCode() != OptimizedCompileJob::SUCCEEDED) {
    if (FLAG_trace_opt) {
      PrintF("[aborted optimizing ");
      info->closure()->ShortPrint();
      PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
    }
    return false;
  }

  // Success!
  DCHECK(!info->isolate()->has_pending_exception());
  InsertCodeIntoOptimizedCodeMap(info);
  RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info,
                            info->shared_info());
  return true;
}


// Starts a concurrent optimization: builds the graph on this thread, then
// queues code generation on the optimizing-compile dispatcher.  Returns
// false if the queue is full, parsing/analysis fails, or graph creation
// fails; in those cases no job is queued.
static bool GetOptimizedCodeLater(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  if (!isolate->optimizing_compile_dispatcher()->IsQueueAvailable()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Compilation queue full, will retry optimizing ");
      info->closure()->ShortPrint();
      PrintF(" later.\n");
    }
    return false;
  }

  // Handles created from here on must survive until the background job
  // finishes, hence the dedicated compilation handle scope.
  CompilationHandleScope handle_scope(info);
  if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;

  // Reopen handles in the new CompilationHandleScope.
  info->ReopenHandlesInNewHandleScope();
  info->parse_info()->ReopenHandlesInNewHandleScope();

  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());

  // The job is zone-allocated so its lifetime is tied to the compilation
  // info's zone rather than managed by delete.
  OptimizedCompileJob* job = new (info->zone()) OptimizedCompileJob(info);
  OptimizedCompileJob::Status status = job->CreateGraph();
  if (status != OptimizedCompileJob::SUCCEEDED) return false;
  isolate->optimizing_compile_dispatcher()->QueueForOptimization(job);

  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Queued ");
    info->closure()->ShortPrint();
    if (info->is_osr()) {
      PrintF(" for concurrent OSR at %d.\n", info->osr_ast_id().ToInt());
    } else {
      PrintF(" for concurrent optimization.\n");
    }
  }
  return true;
}


833
// Returns unoptimized code for |function|, reusing the SharedFunctionInfo's
// code when it has already been compiled and compiling it otherwise.
// Returns an empty MaybeHandle if compilation throws.
MaybeHandle<Code> Compiler::GetUnoptimizedCode(Handle<JSFunction> function) {
  DCHECK(!function->GetIsolate()->has_pending_exception());
  DCHECK(!function->is_compiled());
  // Another closure of the same shared info may already have compiled it.
  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  CompilationInfoWithZone info(function);
  Handle<Code> unoptimized_code;
  ASSIGN_RETURN_ON_EXCEPTION(info.isolate(), unoptimized_code,
                             GetUnoptimizedCodeCommon(&info), Code);
  return unoptimized_code;
}


// Entry point for lazy compilation of |function|.  asm.js functions may be
// compiled directly with the optimizing compiler (gated by flags); everything
// else gets unoptimized code first, optionally followed by an eager
// optimization attempt under --always-opt.
MaybeHandle<Code> Compiler::GetLazyCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(!isolate->has_pending_exception());
  DCHECK(!function->is_compiled());
  AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());
  // If the debugger is active, do not compile with turbofan unless we can
  // deopt from turbofan code.
  if (FLAG_turbo_asm && function->shared()->asm_function() &&
      (FLAG_turbo_asm_deoptimization || !isolate->debug()->is_active()) &&
      !FLAG_turbo_osr) {
    CompilationInfoWithZone info(function);

    VMState<COMPILER> state(isolate);
    PostponeInterruptsScope postpone(isolate);

    // BailoutId::None() means whole-function optimization (no OSR entry).
    info.SetOptimizing(BailoutId::None(), handle(function->shared()->code()));

    if (GetOptimizedCodeNow(&info)) {
      DCHECK(function->shared()->is_compiled());
      return info.code();
    }
    // We have failed compilation. If there was an exception clear it so that
    // we can compile unoptimized code.
    if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  }

  // The optimizing path above may have compiled the shared code as a side
  // effect; reuse it if so.
  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  CompilationInfoWithZone info(function);
  Handle<Code> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, GetUnoptimizedCodeCommon(&info),
                             Code);

  // With --always-opt, immediately try to produce optimized code as well;
  // failure is non-fatal and falls back to the unoptimized result.
  if (FLAG_always_opt) {
    Handle<Code> opt_code;
    if (Compiler::GetOptimizedCode(
            function, result,
            Compiler::NOT_CONCURRENT).ToHandle(&opt_code)) {
      result = opt_code;
    }
  }

  return result;
}
895

896

897 898
// Compiles unoptimized code for a SharedFunctionInfo that has never been
// compiled.  A fresh zone and ParseInfo are created locally because the
// caller provides none.
MaybeHandle<Code> Compiler::GetUnoptimizedCode(
    Handle<SharedFunctionInfo> shared) {
  DCHECK(!shared->GetIsolate()->has_pending_exception());
  DCHECK(!shared->is_compiled());

  Zone compile_zone;
  ParseInfo parse_info(&compile_zone, shared);
  CompilationInfo compile_info(&parse_info);
  return GetUnoptimizedCodeCommon(&compile_info);
}


909 910 911
// Ensures |function| has code installed, lazily compiling it if necessary.
// On failure returns false; with CLEAR_EXCEPTION the pending exception (if
// any) is cleared first.
bool Compiler::EnsureCompiled(Handle<JSFunction> function,
                              ClearExceptionFlag flag) {
  if (function->is_compiled()) return true;
  Handle<Code> code;
  if (!Compiler::GetLazyCode(function).ToHandle(&code)) {
    // Compilation failed.
    if (flag == CLEAR_EXCEPTION) {
      function->GetIsolate()->clear_pending_exception();
    }
    return false;
  }
  function->ReplaceCode(*code);
  DCHECK(function->is_compiled());
  return true;
}
924

925

926 927 928
// TODO(turbofan): In the future, unoptimized code with deopt support could
// be generated lazily once deopt is triggered.
// Makes sure |info|'s shared function has unoptimized code that supports
// deoptimization, regenerating full code with deopt support if needed.
// Returns false if full-codegen fails.
bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
  DCHECK(info->function() != NULL);
  DCHECK(info->scope() != NULL);
  Handle<SharedFunctionInfo> shared = info->shared_info();
  if (!shared->has_deoptimization_support()) {
    // TODO(titzer): just reuse the ParseInfo for the unoptimized compile.
    CompilationInfoWithZone unoptimized(info->closure());
    // Note that we use the same AST that we will use for generating the
    // optimized code.
    ParseInfo* parse_info = unoptimized.parse_info();
    parse_info->set_literal(info->function());
    parse_info->set_scope(info->scope());
    parse_info->set_context(info->context());
    unoptimized.EnableDeoptimizationSupport();
    // If the current code has reloc info for serialization, also include
    // reloc info for serialization for the new code, so that deopt support
    // can be added without losing IC state.
    if (shared->code()->kind() == Code::FUNCTION &&
        shared->code()->has_reloc_info_for_serialization()) {
      unoptimized.PrepareForSerializing();
    }
    if (!FullCodeGenerator::MakeCode(&unoptimized)) return false;

    shared->EnableDeoptimizationSupport(*unoptimized.code());
    shared->set_feedback_vector(*unoptimized.feedback_vector());

    // The scope info might not have been set if a lazily compiled
    // function is inlined before being called for the first time.
    if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) {
      Handle<ScopeInfo> target_scope_info =
          ScopeInfo::Create(info->isolate(), info->zone(), info->scope());
      shared->set_scope_info(*target_scope_info);
    }

    // The existing unoptimized code was replaced with the new one.
    RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, &unoptimized, shared);
  }
  return true;
}


969 970 971 972 973 974 975 976 977
// Compile full code for debugging. This code will have debug break slots
// and deoptimization information. Deoptimization information is required
// in case that an optimized version of this function is still activated on
// the stack. It will also make sure that the full code is compiled with
// the same flags as the previous version, that is flags which can change
// the code generated. The current method of mapping from already compiled
// full code without debug break slots to full code with debug break slots
// depends on the generated code is otherwise exactly the same.
// If compilation fails, just keep the existing code.
MaybeHandle<Code> Compiler::GetDebugCode(Handle<JSFunction> function) {
  CompilationInfoWithZone info(function);
  Isolate* isolate = info.isolate();
  VMState<COMPILER> state(isolate);

  info.MarkAsDebug();

  DCHECK(!isolate->has_pending_exception());
  Handle<Code> old_code(function->shared()->code());
  DCHECK(old_code->kind() == Code::FUNCTION);
  DCHECK(!old_code->has_debug_break_slots());

  info.MarkCompilingForDebugging();
  // Mirror the old code's optimizability so the new code is generated with
  // the same flags (see comment above about code mapping).
  if (old_code->is_compiled_optimizable()) {
    info.EnableDeoptimizationSupport();
  } else {
    info.MarkNonOptimizable();
  }
  MaybeHandle<Code> maybe_new_code = GetUnoptimizedCodeCommon(&info);
  Handle<Code> new_code;
  if (!maybe_new_code.ToHandle(&new_code)) {
    // Compilation failed: keep the existing code, swallow the exception.
    isolate->clear_pending_exception();
  } else {
    DCHECK_EQ(old_code->is_compiled_optimizable(),
              new_code->is_compiled_optimizable());
  }
  return maybe_new_code;
}


// Compiles |script| for LiveEdit, recording function info for the tracker.
// Failures (parse or compile) return silently, leaving the script unchanged.
void Compiler::CompileForLiveEdit(Handle<Script> script) {
  // TODO(635): support extensions.
  Zone zone;
  ParseInfo parse_info(&zone, script);
  CompilationInfo info(&parse_info);
  PostponeInterruptsScope postpone(info.isolate());
  VMState<COMPILER> state(info.isolate());

  info.parse_info()->set_global();
  if (!Parser::ParseStatic(info.parse_info())) return;

  LiveEditFunctionTracker tracker(info.isolate(), info.function());
  if (!CompileUnoptimizedCode(&info)) return;
  if (info.has_shared_info()) {
    Handle<ScopeInfo> scope_info =
        ScopeInfo::Create(info.isolate(), info.zone(), info.scope());
    info.shared_info()->set_scope_info(*scope_info);
  }
  tracker.RecordRootFunctionInfo(info.code());
}


// Compiles toplevel code (a script, eval or module body): parses if needed,
// generates unoptimized code, and allocates the root SharedFunctionInfo.
// Returns a null handle on parse or compile failure.
static Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  PostponeInterruptsScope postpone(isolate);
  DCHECK(!isolate->native_context().is_null());
  ParseInfo* parse_info = info->parse_info();
  Handle<Script> script = parse_info->script();

  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(v8::Context::kDebugIdIndex));

  isolate->debug()->OnBeforeCompile(script);

  DCHECK(parse_info->is_eval() || parse_info->is_global() ||
         parse_info->is_module());

  parse_info->set_toplevel();

  Handle<SharedFunctionInfo> result;

  { VMState<COMPILER> state(info->isolate());
    if (parse_info->literal() == NULL) {
      // Parse the script if needed (if it's already parsed, function() is
      // non-NULL).
      ScriptCompiler::CompileOptions options = parse_info->compile_options();
      // Lazy parsing is allowed for large-enough sources (or when consuming
      // a parser cache), unless the debugger forces eager compilation.
      bool parse_allow_lazy = (options == ScriptCompiler::kConsumeParserCache ||
                               String::cast(script->source())->length() >
                                   FLAG_min_preparse_length) &&
                              !Compiler::DebuggerWantsEagerCompilation(isolate);

      parse_info->set_allow_lazy_parsing(parse_allow_lazy);
      if (!parse_allow_lazy &&
          (options == ScriptCompiler::kProduceParserCache ||
           options == ScriptCompiler::kConsumeParserCache)) {
        // We are going to parse eagerly, but we either 1) have cached data
        // produced by lazy parsing or 2) are asked to generate cached data.
        // Eager parsing cannot benefit from cached data, and producing cached
        // data while parsing eagerly is not implemented.
        parse_info->set_cached_data(nullptr);
        parse_info->set_compile_options(ScriptCompiler::kNoCompileOptions);
      }
      if (!Parser::ParseStatic(parse_info)) {
        return Handle<SharedFunctionInfo>::null();
      }
    }

    FunctionLiteral* lit = info->function();
    LiveEditFunctionTracker live_edit_tracker(isolate, lit);

    // Measure how long it takes to do the compilation; only take the
    // rest of the function into account to avoid overlap with the
    // parsing statistics.
    HistogramTimer* rate = info->is_eval()
          ? info->isolate()->counters()->compile_eval()
          : info->isolate()->counters()->compile();
    HistogramTimerScope timer(rate);

    // Compile the code.
    if (!CompileUnoptimizedCode(info)) {
      return Handle<SharedFunctionInfo>::null();
    }

    // Allocate function.
    DCHECK(!info->code().is_null());
    result = isolate->factory()->NewSharedFunctionInfo(
        lit->name(), lit->materialized_literal_count(), lit->kind(),
        info->code(),
        ScopeInfo::Create(info->isolate(), info->zone(), info->scope()),
        info->feedback_vector());

    DCHECK_EQ(RelocInfo::kNoPosition, lit->function_token_position());
    SharedFunctionInfo::InitFromFunctionLiteral(result, lit);
    result->set_script(*script);
    result->set_is_toplevel(true);

    Handle<String> script_name = script->name()->IsString()
        ? Handle<String>(String::cast(script->name()))
        : isolate->factory()->empty_string();
    Logger::LogEventsAndTags log_tag = info->is_eval()
        ? Logger::EVAL_TAG
        : Logger::ToNativeByScript(Logger::SCRIPT_TAG, *script);

    PROFILE(isolate, CodeCreateEvent(
                log_tag, *info->code(), *result, info, *script_name));

    // Hint to the runtime system used when allocating space for initial
    // property space by setting the expected number of properties for
    // the instances of the function.
    SetExpectedNofPropertiesFromEstimate(result,
                                         lit->expected_property_count());

    if (!script.is_null())
      script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);

    live_edit_tracker.RecordFunctionInfo(result, lit, info->zone());
  }

  isolate->debug()->OnAfterCompile(script);

  return result;
}


1133
// Returns a JSFunction for eval'd |source|, consulting the per-isolate eval
// cache first and compiling (and caching) on a miss.  Returns an empty
// MaybeHandle if compilation fails.
MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
    Handle<String> source, Handle<SharedFunctionInfo> outer_info,
    Handle<Context> context, LanguageMode language_mode,
    ParseRestriction restriction, int scope_position) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_eval_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  CompilationCache* compilation_cache = isolate->compilation_cache();
  MaybeHandle<SharedFunctionInfo> maybe_shared_info =
      compilation_cache->LookupEval(source, outer_info, context, language_mode,
                                    scope_position);
  Handle<SharedFunctionInfo> shared_info;

  if (!maybe_shared_info.ToHandle(&shared_info)) {
    // Cache miss: compile the eval source as toplevel code.
    Handle<Script> script = isolate->factory()->NewScript(source);
    Zone zone;
    ParseInfo parse_info(&zone, script);
    CompilationInfo info(&parse_info);
    parse_info.set_eval();
    if (context->IsNativeContext()) parse_info.set_global();
    parse_info.set_language_mode(language_mode);
    parse_info.set_parse_restriction(restriction);
    parse_info.set_context(context);

    Debug::RecordEvalCaller(script);

    shared_info = CompileToplevel(&info);

    if (shared_info.is_null()) {
      return MaybeHandle<JSFunction>();
    } else {
      // Explicitly disable optimization for eval code. We're not yet prepared
      // to handle eval-code in the optimizing compiler.
      if (restriction != ONLY_SINGLE_FUNCTION_LITERAL) {
        shared_info->DisableOptimization(kEval);
      }

      // If caller is strict mode, the result must be in strict mode as well.
      DCHECK(is_sloppy(language_mode) ||
             is_strict(shared_info->language_mode()));
      if (!shared_info->dont_cache()) {
        compilation_cache->PutEval(source, outer_info, context, shared_info,
                                   scope_position);
      }
    }
  } else if (shared_info->ic_age() != isolate->heap()->global_ic_age()) {
    // Cache hit from an older GC epoch: reset IC state before reuse.
    shared_info->ResetForNewContext(isolate->heap()->global_ic_age());
  }

  return isolate->factory()->NewFunctionFromSharedFunctionInfo(
      shared_info, context, NOT_TENURED);
}


1189
// Compiles a full script: validates the cache-option invariants, consults
// the per-isolate compilation cache and (optionally) embedder-provided code
// cache, and compiles on a miss.  On success the result is added to the
// compilation cache (unless an extension is involved or caching is
// disabled), and a code cache may be produced into |cached_data|.
// Returns a null handle on failure, after reporting pending messages.
//
// Fix: the embedder-code-cache branch previously declared a second local
// named |result|, shadowing the function-level |result| declared above it;
// renamed to |deserialized_result| to remove the shadowing hazard.
Handle<SharedFunctionInfo> Compiler::CompileScript(
    Handle<String> source, Handle<Object> script_name, int line_offset,
    int column_offset, ScriptOriginOptions resource_options,
    Handle<Object> source_map_url, Handle<Context> context,
    v8::Extension* extension, ScriptData** cached_data,
    ScriptCompiler::CompileOptions compile_options, NativesFlag natives,
    bool is_module) {
  Isolate* isolate = source->GetIsolate();
  // Normalize/validate |cached_data| according to the compile options.
  if (compile_options == ScriptCompiler::kNoCompileOptions) {
    cached_data = NULL;
  } else if (compile_options == ScriptCompiler::kProduceParserCache ||
             compile_options == ScriptCompiler::kProduceCodeCache) {
    DCHECK(cached_data && !*cached_data);
    DCHECK(extension == NULL);
    DCHECK(!isolate->debug()->is_loaded());
  } else {
    DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
           compile_options == ScriptCompiler::kConsumeCodeCache);
    DCHECK(cached_data && *cached_data);
    DCHECK(extension == NULL);
  }
  int source_length = source->length();
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  // TODO(rossberg): The natives do not yet obey strong mode rules
  // (for example, some macros use '==').
  bool use_strong = FLAG_use_strong && !isolate->bootstrapper()->IsActive();
  LanguageMode language_mode =
      construct_language_mode(FLAG_use_strict, use_strong);

  CompilationCache* compilation_cache = isolate->compilation_cache();

  // Do a lookup in the compilation cache but not for extensions.
  MaybeHandle<SharedFunctionInfo> maybe_result;
  Handle<SharedFunctionInfo> result;
  if (extension == NULL) {
    // First check per-isolate compilation cache.
    maybe_result = compilation_cache->LookupScript(
        source, script_name, line_offset, column_offset, resource_options,
        context, language_mode);
    if (maybe_result.is_null() && FLAG_serialize_toplevel &&
        compile_options == ScriptCompiler::kConsumeCodeCache &&
        !isolate->debug()->is_loaded()) {
      // Then check cached code provided by embedder.
      HistogramTimerScope timer(isolate->counters()->compile_deserialize());
      Handle<SharedFunctionInfo> deserialized_result;
      if (CodeSerializer::Deserialize(isolate, *cached_data, source)
              .ToHandle(&deserialized_result)) {
        // Promote to per-isolate compilation cache.
        DCHECK(!deserialized_result->dont_cache());
        compilation_cache->PutScript(source, context, language_mode,
                                     deserialized_result);
        return deserialized_result;
      }
      // Deserializer failed. Fall through to compile.
    }
  }

  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
      compile_options == ScriptCompiler::kProduceCodeCache) {
    timer.Start();
  }

  if (!maybe_result.ToHandle(&result)) {
    // No cache entry found. Compile the script.

    // Create a script object describing the script to be compiled.
    Handle<Script> script = isolate->factory()->NewScript(source);
    if (natives == NATIVES_CODE) {
      script->set_type(Smi::FromInt(Script::TYPE_NATIVE));
    }
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(Smi::FromInt(line_offset));
      script->set_column_offset(Smi::FromInt(column_offset));
    }
    script->set_origin_options(resource_options);
    if (!source_map_url.is_null()) {
      script->set_source_mapping_url(*source_map_url);
    }

    // Compile the function and add it to the cache.
    Zone zone;
    ParseInfo parse_info(&zone, script);
    CompilationInfo info(&parse_info);
    if (FLAG_harmony_modules && is_module) {
      parse_info.set_module();
    } else {
      parse_info.set_global();
    }
    if (compile_options != ScriptCompiler::kNoCompileOptions) {
      parse_info.set_cached_data(cached_data);
    }
    parse_info.set_compile_options(compile_options);
    parse_info.set_extension(extension);
    parse_info.set_context(context);
    if (FLAG_serialize_toplevel &&
        compile_options == ScriptCompiler::kProduceCodeCache) {
      info.PrepareForSerializing();
    }

    parse_info.set_language_mode(
        static_cast<LanguageMode>(info.language_mode() | language_mode));
    result = CompileToplevel(&info);
    if (extension == NULL && !result.is_null() && !result->dont_cache()) {
      compilation_cache->PutScript(source, context, language_mode, result);
      if (FLAG_serialize_toplevel &&
          compile_options == ScriptCompiler::kProduceCodeCache) {
        // Produce the embedder code cache for this script.
        HistogramTimerScope histogram_timer(
            isolate->counters()->compile_serialize());
        *cached_data = CodeSerializer::Serialize(isolate, result, source);
        if (FLAG_profile_deserialization) {
          PrintF("[Compiling and serializing took %0.3f ms]\n",
                 timer.Elapsed().InMillisecondsF());
        }
      }
    }

    if (result.is_null()) isolate->ReportPendingMessages();
  } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
    // Cache hit from an older GC epoch: reset IC state before reuse.
    result->ResetForNewContext(isolate->heap()->global_ic_age());
  }
  return result;
}


1316
// Finishes compilation of a streamed script whose parsing was started on a
// background thread; |parse_info| already carries the (partially) parsed
// state.
Handle<SharedFunctionInfo> Compiler::CompileStreamedScript(
    Handle<Script> script, ParseInfo* parse_info, int source_length) {
  Isolate* isolate = script->GetIsolate();
  // TODO(titzer): increment the counters in caller.
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  // Merge the flag-derived language mode into whatever the parser recorded.
  LanguageMode flag_language_mode =
      construct_language_mode(FLAG_use_strict, FLAG_use_strong);
  LanguageMode merged_mode = static_cast<LanguageMode>(
      parse_info->language_mode() | flag_language_mode);
  parse_info->set_language_mode(merged_mode);

  CompilationInfo compile_info(parse_info);
  // TODO(marja): FLAG_serialize_toplevel is not honoured and won't be; when the
  // real code caching lands, streaming needs to be adapted to use it.
  return CompileToplevel(&compile_info);
}


1335 1336 1337
// Builds a SharedFunctionInfo for an inner function literal found while
// compiling |script|.  Either installs the lazy-compile stub (when lazy
// compilation is allowed) or generates full code eagerly.  Returns a null
// handle if eager code generation fails.
Handle<SharedFunctionInfo> Compiler::BuildFunctionInfo(
    FunctionLiteral* literal, Handle<Script> script,
    CompilationInfo* outer_info) {
  // Precondition: code has been parsed and scopes have been analyzed.
  Zone zone;
  ParseInfo parse_info(&zone, script);
  CompilationInfo info(&parse_info);
  parse_info.set_literal(literal);
  parse_info.set_scope(literal->scope());
  parse_info.set_language_mode(literal->scope()->language_mode());
  if (outer_info->will_serialize()) info.PrepareForSerializing();

  Isolate* isolate = info.isolate();
  Factory* factory = isolate->factory();
  LiveEditFunctionTracker live_edit_tracker(isolate, literal);
  // Determine if the function can be lazily compiled. This is necessary to
  // allow some of our builtin JS files to be lazily compiled. These
  // builtins cannot be handled lazily by the parser, since we have to know
  // if a function uses the special natives syntax, which is something the
  // parser records.
  // If the debugger requests compilation for break points, we cannot be
  // aggressive about lazy compilation, because it might trigger compilation
  // of functions without an outer context when setting a breakpoint through
  // Debug::FindSharedFunctionInfoInScript.
  bool allow_lazy_without_ctx = literal->AllowsLazyCompilationWithoutContext();
  bool allow_lazy =
      literal->AllowsLazyCompilation() &&
      !DebuggerWantsEagerCompilation(isolate, allow_lazy_without_ctx);

  if (outer_info->parse_info()->is_toplevel() && outer_info->will_serialize()) {
    // Make sure that if the toplevel code (possibly to be serialized),
    // the inner function must be allowed to be compiled lazily.
    // This is necessary to serialize toplevel code without inner functions.
    DCHECK(allow_lazy);
  }

  // Generate code
  Handle<ScopeInfo> scope_info;
  if (FLAG_lazy && allow_lazy && !literal->should_eager_compile()) {
    // Lazy path: install the CompileLazy builtin as a placeholder.
    Handle<Code> code = isolate->builtins()->CompileLazy();
    info.SetCode(code);
    // There's no need in theory for a lazy-compiled function to have a type
    // feedback vector, but some parts of the system expect all
    // SharedFunctionInfo instances to have one.  The size of the vector depends
    // on how many feedback-needing nodes are in the tree, and when lazily
    // parsing we might not know that, if this function was never parsed before.
    // In that case the vector will be replaced the next time MakeCode is
    // called.
    info.EnsureFeedbackVector();
    scope_info = Handle<ScopeInfo>(ScopeInfo::Empty(isolate));
  } else if (Renumber(info.parse_info()) &&
             FullCodeGenerator::MakeCode(&info)) {
    // MakeCode will ensure that the feedback vector is present and
    // appropriately sized.
    DCHECK(!info.code().is_null());
    scope_info = ScopeInfo::Create(info.isolate(), info.zone(), info.scope());
    if (literal->should_eager_compile() &&
        literal->should_be_used_once_hint()) {
      info.code()->MarkToBeExecutedOnce(isolate);
    }
  } else {
    // Eager code generation failed.
    return Handle<SharedFunctionInfo>::null();
  }

  // Create a shared function info object.
  Handle<SharedFunctionInfo> result = factory->NewSharedFunctionInfo(
      literal->name(), literal->materialized_literal_count(), literal->kind(),
      info.code(), scope_info, info.feedback_vector());

  SharedFunctionInfo::InitFromFunctionLiteral(result, literal);
  result->set_script(*script);
  result->set_is_toplevel(false);

  RecordFunctionCompilation(Logger::FUNCTION_TAG, &info, result);
  result->set_allows_lazy_compilation(literal->AllowsLazyCompilation());
  result->set_allows_lazy_compilation_without_context(allow_lazy_without_ctx);

  // Set the expected number of properties for instances and return
  // the resulting function.
  SetExpectedNofPropertiesFromEstimate(result,
                                       literal->expected_property_count());
  live_edit_tracker.RecordFunctionInfo(result, literal, info.zone());
  return result;
}


1421 1422 1423 1424 1425 1426 1427 1428 1429
// Returns optimized code for |function|, first consulting the optimized
// code map, then compiling synchronously or queueing a concurrent job
// depending on |mode|.  Returns an empty MaybeHandle if optimization is
// not possible or fails; in the concurrent case a successful queueing
// returns the InOptimizationQueue builtin as a placeholder.
MaybeHandle<Code> Compiler::GetOptimizedCode(Handle<JSFunction> function,
                                             Handle<Code> current_code,
                                             ConcurrencyMode mode,
                                             BailoutId osr_ast_id) {
  Handle<Code> cached_code;
  if (GetCodeFromOptimizedCodeMap(
          function, osr_ast_id).ToHandle(&cached_code)) {
    return cached_code;
  }

  SmartPointer<CompilationInfo> info(new CompilationInfoWithZone(function));
  Isolate* isolate = info->isolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));
  VMState<COMPILER> state(isolate);
  DCHECK(!isolate->has_pending_exception());
  PostponeInterruptsScope postpone(isolate);

  Handle<SharedFunctionInfo> shared = info->shared_info();
  if (shared->code()->kind() != Code::FUNCTION ||
      ScopeInfo::Empty(isolate) == shared->scope_info()) {
    // The function was never compiled. Compile it unoptimized first.
    // TODO(titzer): reuse the AST and scope info from this compile.
    CompilationInfoWithZone nested(function);
    nested.EnableDeoptimizationSupport();
    if (!GetUnoptimizedCodeCommon(&nested).ToHandle(&current_code)) {
      return MaybeHandle<Code>();
    }
    shared->ReplaceCode(*current_code);
  }
  current_code->set_profiler_ticks(0);

  // TODO(mstarzinger): We cannot properly deserialize a scope chain containing
  // an eval scope and hence would fail at parsing the eval source again.
  if (shared->disable_optimization_reason() == kEval) {
    return MaybeHandle<Code>();
  }

  // TODO(mstarzinger): We cannot properly deserialize a scope chain for the
  // builtin context, hence Genesis::InstallExperimentalNatives would fail.
  if (shared->is_toplevel() && isolate->bootstrapper()->IsActive()) {
    return MaybeHandle<Code>();
  }

  info->SetOptimizing(osr_ast_id, current_code);

  if (mode == CONCURRENT) {
    if (GetOptimizedCodeLater(info.get())) {
      info.Detach();  // The background recompile job owns this now.
      return isolate->builtins()->InOptimizationQueue();
    }
  } else {
    if (GetOptimizedCodeNow(info.get())) return info->code();
  }

  // Optimization failed; clear any exception so the caller can fall back.
  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  return MaybeHandle<Code>();
}


1480 1481 1482
// Finalizes a concurrent optimization job on the main thread: re-validates
// that optimization is still worthwhile, generates code, and installs it.
// Returns a null handle if the job failed or was invalidated; RetryOptimization
// records the reason so the function may be re-optimized later.
Handle<Code> Compiler::GetConcurrentlyOptimizedCode(OptimizedCompileJob* job) {
  // Take ownership of compilation info.  Deleting compilation info
  // also tears down the zone and the recompile job.
  SmartPointer<CompilationInfo> info(job->info());
  Isolate* isolate = info->isolate();

  VMState<COMPILER> state(isolate);
  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());

  Handle<SharedFunctionInfo> shared = info->shared_info();
  shared->code()->set_profiler_ticks(0);

  // 1) Optimization on the concurrent thread may have failed.
  // 2) The function may have already been optimized by OSR.  Simply continue.
  //    Except when OSR already disabled optimization for some reason.
  // 3) The code may have already been invalidated due to dependency change.
  // 4) Debugger may have been activated.
  // 5) Code generation may have failed.
  if (job->last_status() == OptimizedCompileJob::SUCCEEDED) {
    if (shared->optimization_disabled()) {
      job->RetryOptimization(kOptimizationDisabled);
    } else if (info->dependencies()->HasAborted()) {
      job->RetryOptimization(kBailedOutDueToDependencyChange);
    } else if (isolate->debug()->has_break_points()) {
      job->RetryOptimization(kDebuggerHasBreakPoints);
    } else if (job->GenerateCode() == OptimizedCompileJob::SUCCEEDED) {
      RecordFunctionCompilation(Logger::LAZY_COMPILE_TAG, info.get(), shared);
      // Only insert when no equivalent entry (e.g. from OSR) exists already.
      if (info->shared_info()->SearchOptimizedCodeMap(
              info->context()->native_context(), info->osr_ast_id()) == -1) {
        InsertCodeIntoOptimizedCodeMap(info.get());
      }
      if (FLAG_trace_opt) {
        PrintF("[completed optimizing ");
        info->closure()->ShortPrint();
        PrintF("]\n");
      }
      return Handle<Code>(*info->code());
    }
  }

  DCHECK(job->last_status() != OptimizedCompileJob::SUCCEEDED);
  if (FLAG_trace_opt) {
    PrintF("[aborted optimizing ");
    info->closure()->ShortPrint();
    PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
  }
  return Handle<Code>::null();
}


1530
bool Compiler::DebuggerWantsEagerCompilation(Isolate* isolate,
1531
                                             bool allow_lazy_without_ctx) {
1532 1533
  if (LiveEditFunctionTracker::IsActive(isolate)) return true;
  Debug* debug = isolate->debug();
1534 1535
  bool debugging = debug->is_active() || debug->has_break_points();
  return debugging && !allow_lazy_without_ctx;
1536 1537 1538
}


1539
// Begins a named compilation phase.  Under --hydrogen-stats, records the
// zone allocation baseline and starts the phase timer so the destructor
// can report deltas.
CompilationPhase::CompilationPhase(const char* name, CompilationInfo* info)
    : name_(name), info_(info) {
  if (FLAG_hydrogen_stats) {
    info_zone_start_allocation_size_ = info->zone()->allocation_size();
    timer_.Start();
  }
}


// Ends the phase.  Under --hydrogen-stats, reports elapsed time and the
// bytes allocated in both the phase zone and the compilation zone since
// the constructor ran.
CompilationPhase::~CompilationPhase() {
  if (FLAG_hydrogen_stats) {
    size_t size = zone()->allocation_size();
    size += info_->zone()->allocation_size() - info_zone_start_allocation_size_;
    isolate()->GetHStatistics()->SaveTiming(name_, timer_.Elapsed(), size);
  }
}


bool CompilationPhase::ShouldProduceTraceOutput() const {
1558 1559
  // Trace if the appropriate trace flag is set and the phase name's first
  // character is in the FLAG_trace_phase command line parameter.
1560
  AllowHandleDereference allow_deref;
1561 1562 1563 1564
  bool tracing_on = info()->IsStub()
      ? FLAG_trace_hydrogen_stubs
      : (FLAG_trace_hydrogen &&
         info()->closure()->PassesFilter(FLAG_trace_hydrogen_filter));
1565
  return (tracing_on &&
1566
      base::OS::StrChr(const_cast<char*>(FLAG_trace_phase), name_[0]) != NULL);
1567 1568
}

1569 1570 1571 1572 1573 1574 1575

#if DEBUG
// Debug-only helper: pretty-prints the function's AST back to source form.
void CompilationInfo::PrintAstForTesting() {
  PrintF("--- Source from AST ---\n%s\n",
         PrettyPrinter(isolate(), zone()).PrintProgram(function()));
}
#endif
1576 1577
}  // namespace internal
}  // namespace v8