compiler.cc 72 KB
Newer Older
1
// Copyright 2012 the V8 project authors. All rights reserved.
2 3
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4

5
#include "src/compiler.h"
6

7
#include <algorithm>
8
#include <memory>
9

10
#include "src/asmjs/asm-js.h"
11
#include "src/asmjs/asm-typer.h"
12 13 14
#include "src/ast/ast-numbering.h"
#include "src/ast/prettyprinter.h"
#include "src/ast/scopes.h"
15 16 17
#include "src/bootstrapper.h"
#include "src/codegen.h"
#include "src/compilation-cache.h"
18
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
19
#include "src/compiler/pipeline.h"
20
#include "src/crankshaft/hydrogen.h"
21 22
#include "src/debug/debug.h"
#include "src/debug/liveedit.h"
23
#include "src/deoptimizer.h"
24
#include "src/frames-inl.h"
25
#include "src/full-codegen/full-codegen.h"
yangguo's avatar
yangguo committed
26
#include "src/globals.h"
27
#include "src/heap/heap.h"
28
#include "src/interpreter/interpreter.h"
29
#include "src/isolate-inl.h"
30
#include "src/log-inl.h"
31
#include "src/messages.h"
32 33 34
#include "src/parsing/parser.h"
#include "src/parsing/rewriter.h"
#include "src/parsing/scanner-character-streams.h"
35
#include "src/runtime-profiler.h"
36
#include "src/snapshot/code-serializer.h"
37
#include "src/vm-state-inl.h"
38

39 40
namespace v8 {
namespace internal {
41

42

43

44 45 46
// A wrapper around a CompilationInfo that detaches the Handles from
// the underlying DeferredHandleScope and stores them in info_ on
// destruction.
47
class CompilationHandleScope final {
48 49 50 51 52 53 54 55 56
 public:
  explicit CompilationHandleScope(CompilationInfo* info)
      : deferred_(info->isolate()), info_(info) {}
  ~CompilationHandleScope() { info_->set_deferred_handles(deferred_.Detach()); }

 private:
  DeferredHandleScope deferred_;
  CompilationInfo* info_;
};
57

58 59 60 61 62 63 64 65 66 67 68 69 70
// Helper that times a scoped region and records the elapsed time.
struct ScopedTimer {
  explicit ScopedTimer(base::TimeDelta* location) : location_(location) {
    DCHECK(location_ != NULL);
    timer_.Start();
  }

  ~ScopedTimer() { *location_ += timer_.Elapsed(); }

  base::ElapsedTimer timer_;
  base::TimeDelta* location_;
};

71
// ----------------------------------------------------------------------------
72
// Implementation of CompilationJob
73

74 75
CompilationJob::Status CompilationJob::PrepareJob() {
  DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
76
  DisallowJavascriptExecution no_js(isolate());
77

78
  if (FLAG_trace_opt && info()->IsOptimizing()) {
79 80 81 82 83 84 85
    OFStream os(stdout);
    os << "[compiling method " << Brief(*info()->closure()) << " using "
       << compiler_name_;
    if (info()->is_osr()) os << " OSR";
    os << "]" << std::endl;
  }

86
  // Delegate to the underlying implementation.
87 88 89
  DCHECK(state() == State::kReadyToPrepare);
  ScopedTimer t(&time_taken_to_prepare_);
  return UpdateState(PrepareJobImpl(), State::kReadyToExecute);
90 91
}

92
CompilationJob::Status CompilationJob::ExecuteJob() {
93 94 95 96 97 98 99 100 101 102 103 104
  std::unique_ptr<DisallowHeapAllocation> no_allocation;
  std::unique_ptr<DisallowHandleAllocation> no_handles;
  std::unique_ptr<DisallowHandleDereference> no_deref;
  std::unique_ptr<DisallowCodeDependencyChange> no_dependency_change;
  if (can_execute_on_background_thread()) {
    no_allocation.reset(new DisallowHeapAllocation());
    no_handles.reset(new DisallowHandleAllocation());
    no_deref.reset(new DisallowHandleDereference());
    no_dependency_change.reset(new DisallowCodeDependencyChange());
  } else {
    DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
  }
105

106
  // Delegate to the underlying implementation.
107 108 109
  DCHECK(state() == State::kReadyToExecute);
  ScopedTimer t(&time_taken_to_execute_);
  return UpdateState(ExecuteJobImpl(), State::kReadyToFinalize);
110
}
111

112 113
CompilationJob::Status CompilationJob::FinalizeJob() {
  DCHECK(ThreadId::Current().Equals(info()->isolate()->thread_id()));
114 115 116
  DisallowCodeDependencyChange no_dependency_change;
  DisallowJavascriptExecution no_js(isolate());
  DCHECK(!info()->dependencies()->HasAborted());
117

118
  // Delegate to the underlying implementation.
119 120 121
  DCHECK(state() == State::kReadyToFinalize);
  ScopedTimer t(&time_taken_to_finalize_);
  return UpdateState(FinalizeJobImpl(), State::kSucceeded);
122 123
}

124 125 126 127 128 129 130 131 132 133 134 135 136 137
CompilationJob::Status CompilationJob::RetryOptimization(BailoutReason reason) {
  DCHECK(info_->IsOptimizing());
  info_->RetryOptimization(reason);
  state_ = State::kFailed;
  return FAILED;
}

// Records {reason} via CompilationInfo::AbortOptimization and moves the job
// into the failed state.
CompilationJob::Status CompilationJob::AbortOptimization(BailoutReason reason) {
  DCHECK(info_->IsOptimizing());
  info_->AbortOptimization(reason);
  state_ = State::kFailed;
  return FAILED;
}

138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188
void CompilationJob::RecordUnoptimizedCompilationStats() const {
  int code_size;
  if (info()->has_bytecode_array()) {
    code_size = info()->bytecode_array()->SizeIncludingMetadata();
  } else {
    code_size = info()->code()->SizeIncludingMetadata();
  }

  Counters* counters = isolate()->counters();
  // TODO(4280): Rename counters from "baseline" to "unoptimized" eventually.
  counters->total_baseline_code_size()->Increment(code_size);
  counters->total_baseline_compile_count()->Increment(1);

  // TODO(5203): Add timers for each phase of compilation.
}

// Records statistics (opt count, timings, tracing output) for a finished
// optimized compilation.
void CompilationJob::RecordOptimizedCompilationStats() const {
  DCHECK(info()->IsOptimizing());
  Handle<JSFunction> function = info()->closure();
  if (!function->IsOptimized()) {
    // Concurrent recompilation and OSR may race.  Increment only once.
    int opt_count = function->shared()->opt_count();
    function->shared()->set_opt_count(opt_count + 1);
  }
  double ms_creategraph = time_taken_to_prepare_.InMillisecondsF();
  double ms_optimize = time_taken_to_execute_.InMillisecondsF();
  double ms_codegen = time_taken_to_finalize_.InMillisecondsF();
  if (FLAG_trace_opt) {
    PrintF("[optimizing ");
    function->ShortPrint();
    PrintF(" - took %0.3f, %0.3f, %0.3f ms]\n", ms_creategraph, ms_optimize,
           ms_codegen);
  }
  if (FLAG_trace_opt_stats) {
    // Process-wide running totals, only used for --trace-opt-stats output.
    static double compilation_time = 0.0;
    static int compiled_functions = 0;
    static int code_size = 0;

    compilation_time += (ms_creategraph + ms_optimize + ms_codegen);
    compiled_functions++;
    code_size += function->shared()->SourceSize();
    PrintF("Compiled: %d functions with %d byte source size in %fms.\n",
           compiled_functions, code_size, compilation_time);
  }
  if (FLAG_hydrogen_stats) {
    isolate()->GetHStatistics()->IncrementSubtotals(time_taken_to_prepare_,
                                                    time_taken_to_execute_,
                                                    time_taken_to_finalize_);
  }
}

189 190
Isolate* CompilationJob::isolate() const { return info()->isolate(); }

191 192 193 194 195 196
namespace {

void AddWeakObjectToCodeDependency(Isolate* isolate, Handle<HeapObject> object,
                                   Handle<Code> code) {
  Handle<WeakCell> cell = Code::WeakCellFor(code);
  Heap* heap = isolate->heap();
197 198 199 200 201 202 203 204
  if (heap->InNewSpace(*object)) {
    heap->AddWeakNewSpaceObjectToCodeDependency(object, cell);
  } else {
    Handle<DependentCode> dep(heap->LookupWeakObjectToCodeDependency(object));
    dep =
        DependentCode::InsertWeakCode(dep, DependentCode::kWeakCodeGroup, cell);
    heap->AddWeakObjectToCodeDependency(object, dep);
  }
205 206
}

207
}  // namespace
208

209
void CompilationJob::RegisterWeakObjectsInOptimizedCode(Handle<Code> code) {
210 211 212 213 214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248
  // TODO(turbofan): Move this to pipeline.cc once Crankshaft dies.
  Isolate* const isolate = code->GetIsolate();
  DCHECK(code->is_optimized_code());
  std::vector<Handle<Map>> maps;
  std::vector<Handle<HeapObject>> objects;
  {
    DisallowHeapAllocation no_gc;
    int const mode_mask = RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
                          RelocInfo::ModeMask(RelocInfo::CELL);
    for (RelocIterator it(*code, mode_mask); !it.done(); it.next()) {
      RelocInfo::Mode mode = it.rinfo()->rmode();
      if (mode == RelocInfo::CELL &&
          code->IsWeakObjectInOptimizedCode(it.rinfo()->target_cell())) {
        objects.push_back(handle(it.rinfo()->target_cell(), isolate));
      } else if (mode == RelocInfo::EMBEDDED_OBJECT &&
                 code->IsWeakObjectInOptimizedCode(
                     it.rinfo()->target_object())) {
        Handle<HeapObject> object(HeapObject::cast(it.rinfo()->target_object()),
                                  isolate);
        if (object->IsMap()) {
          maps.push_back(Handle<Map>::cast(object));
        } else {
          objects.push_back(object);
        }
      }
    }
  }
  for (Handle<Map> map : maps) {
    if (map->dependent_code()->IsEmpty(DependentCode::kWeakCodeGroup)) {
      isolate->heap()->AddRetainedMap(map);
    }
    Map::AddDependentCode(map, DependentCode::kWeakCodeGroup, code);
  }
  for (Handle<HeapObject> object : objects) {
    AddWeakObjectToCodeDependency(isolate, object, code);
  }
  code->set_can_have_weak_objects(true);
}

249 250 251 252
// ----------------------------------------------------------------------------
// Local helper methods that make up the compilation pipeline.

namespace {
253

254 255 256 257 258 259
bool IsEvalToplevel(Handle<SharedFunctionInfo> shared) {
  return shared->is_toplevel() && shared->script()->IsScript() &&
         Script::cast(shared->script())->compilation_type() ==
             Script::COMPILATION_TYPE_EVAL;
}

260
void RecordFunctionCompilation(CodeEventListener::LogEventsAndTags tag,
261
                               CompilationInfo* info) {
262 263 264 265
  // Log the code generation. If source information is available include
  // script name and line number. Check explicitly whether logging is
  // enabled as finding the line number is not free.
  if (info->isolate()->logger()->is_logging_code_events() ||
266
      info->isolate()->is_profiling()) {
267
    Handle<SharedFunctionInfo> shared = info->shared_info();
268
    Handle<Script> script = info->parse_info()->script();
269 270 271 272
    Handle<AbstractCode> abstract_code =
        info->has_bytecode_array()
            ? Handle<AbstractCode>::cast(info->bytecode_array())
            : Handle<AbstractCode>::cast(info->code());
273 274
    if (abstract_code.is_identical_to(
            info->isolate()->builtins()->CompileLazy())) {
275 276 277 278 279 280 281 282
      return;
    }
    int line_num = Script::GetLineNumber(script, shared->start_position()) + 1;
    int column_num =
        Script::GetColumnNumber(script, shared->start_position()) + 1;
    String* script_name = script->name()->IsString()
                              ? String::cast(script->name())
                              : info->isolate()->heap()->empty_string();
283 284
    CodeEventListener::LogEventsAndTags log_tag =
        Logger::ToNativeByScript(tag, *script);
285
    PROFILE(info->isolate(),
286
            CodeCreateEvent(log_tag, *abstract_code, *shared, script_name,
287 288 289 290
                            line_num, column_num));
  }
}

291
void EnsureFeedbackMetadata(CompilationInfo* info) {
292
  DCHECK(info->has_shared_info());
293

294
  // If no type feedback metadata exists, we create it now. At this point the
295 296 297
  // AstNumbering pass has already run. Note the snapshot can contain outdated
  // vectors for a different configuration, hence we also recreate a new vector
  // when the function is not compiled (i.e. no code was serialized).
298 299 300

  // TODO(mvstanton): reintroduce is_empty() predicate to feedback_metadata().
  if (info->shared_info()->feedback_metadata()->length() == 0 ||
301
      !info->shared_info()->is_compiled()) {
302 303
    Handle<TypeFeedbackMetadata> feedback_metadata = TypeFeedbackMetadata::New(
        info->isolate(), info->literal()->feedback_vector_spec());
304
    info->shared_info()->set_feedback_metadata(*feedback_metadata);
305 306 307 308
  }

  // It's very important that recompiles do not alter the structure of the type
  // feedback vector. Verify that the structure fits the function literal.
309
  CHECK(!info->shared_info()->feedback_metadata()->SpecDiffersFrom(
310 311 312
      info->literal()->feedback_vector_spec()));
}

313
bool ShouldUseIgnition(CompilationInfo* info) {
314 315
  if (!FLAG_ignition) return false;

316 317 318 319 320
  DCHECK(info->has_shared_info());

  // When requesting debug code as a replacement for existing code, we provide
  // the same kind as the existing code (to prevent implicit tier-change).
  if (info->is_debug() && info->shared_info()->is_compiled()) {
321
    return !info->shared_info()->HasBaselineCode();
322 323
  }

324 325 326 327 328
  // Since we can't OSR from Ignition, skip Ignition for asm.js functions.
  if (info->shared_info()->asm_function()) {
    return false;
  }

329
  // Checks whether top level functions should be passed by the filter.
330
  if (info->shared_info()->is_toplevel()) {
331 332 333 334 335
    Vector<const char> filter = CStrVector(FLAG_ignition_filter);
    return (filter.length() == 0) || (filter.length() == 1 && filter[0] == '*');
  }

  // Finally respect the filter.
336
  return info->shared_info()->PassesFilter(FLAG_ignition_filter);
337 338
}

339 340 341 342 343 344 345 346 347 348 349
CompilationJob* GetUnoptimizedCompilationJob(CompilationInfo* info) {
  // Function should have been parsed and analyzed before creating a compilation
  // job.
  DCHECK_NOT_NULL(info->literal());
  DCHECK_NOT_NULL(info->scope());

  EnsureFeedbackMetadata(info);
  if (ShouldUseIgnition(info)) {
    return interpreter::Interpreter::NewCompilationJob(info);
  } else {
    return FullCodeGenerator::NewCompilationJob(info);
350 351 352
  }
}

353
bool GenerateUnoptimizedCode(CompilationInfo* info) {
354 355
  if (FLAG_validate_asm && info->scope()->asm_module() &&
      !info->shared_info()->is_asm_wasm_broken()) {
356
    EnsureFeedbackMetadata(info);
357 358 359 360 361 362
    MaybeHandle<FixedArray> wasm_data;
    wasm_data = AsmJs::ConvertAsmToWasm(info->parse_info());
    if (!wasm_data.is_null()) {
      info->shared_info()->set_asm_wasm_data(*wasm_data.ToHandleChecked());
      info->SetCode(info->isolate()->builtins()->InstantiateAsmJs());
      return true;
363 364
    }
  }
365 366 367 368 369 370 371

  std::unique_ptr<CompilationJob> job(GetUnoptimizedCompilationJob(info));
  if (job->PrepareJob() != CompilationJob::SUCCEEDED) return false;
  if (job->ExecuteJob() != CompilationJob::SUCCEEDED) return false;
  if (job->FinalizeJob() != CompilationJob::SUCCEEDED) return false;
  job->RecordUnoptimizedCompilationStats();
  return true;
372 373
}

374
bool CompileUnoptimizedCode(CompilationInfo* info) {
375
  DCHECK(AllowCompilation::IsAllowed(info->isolate()));
376 377
  if (!Compiler::Analyze(info->parse_info()) ||
      !GenerateUnoptimizedCode(info)) {
378 379
    Isolate* isolate = info->isolate();
    if (!isolate->has_pending_exception()) isolate->StackOverflow();
380
    return false;
381
  }
382
  return true;
383 384
}

385 386
void InstallSharedScopeInfo(CompilationInfo* info,
                            Handle<SharedFunctionInfo> shared) {
387
  Handle<ScopeInfo> scope_info = info->scope()->scope_info();
388 389 390 391 392
  shared->set_scope_info(*scope_info);
}

// Installs the code (and bytecode, if produced) from {info} on the shared
// function info.
void InstallSharedCompilationResult(CompilationInfo* info,
                                    Handle<SharedFunctionInfo> shared) {
  // TODO(mstarzinger): Compiling for debug code might be used to reveal inner
  // functions via {FindSharedFunctionInfoInScript}, in which case we end up
  // regenerating existing bytecode. Fix this!
  if (info->is_debug() && info->has_bytecode_array()) {
    shared->ClearBytecodeArray();
  }
  DCHECK(!info->code().is_null());
  shared->ReplaceCode(*info->code());
  if (info->has_bytecode_array()) {
    DCHECK(!shared->HasBytecodeArray());  // Only compiled once.
    shared->set_bytecode_array(*info->bytecode_array());
  }
}
406

407 408 409 410 411 412 413 414 415 416 417 418 419
void InstallUnoptimizedCode(CompilationInfo* info) {
  Handle<SharedFunctionInfo> shared = info->shared_info();

  // Update the shared function info with the scope info.
  InstallSharedScopeInfo(info, shared);

  // Install compilation result on the shared function info
  InstallSharedCompilationResult(info, shared);

  // Record the function compilation event.
  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
}

420
MUST_USE_RESULT MaybeHandle<Code> GetUnoptimizedCode(CompilationInfo* info) {
421 422
  VMState<COMPILER> state(info->isolate());
  PostponeInterruptsScope postpone(info->isolate());
423

424 425
  // Create a canonical handle scope before internalizing parsed values if
  // compiling bytecode. This is required for off-thread bytecode generation.
426 427 428
  std::unique_ptr<CanonicalHandleScope> canonical;
  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info->isolate()));

429
  // Parse and update CompilationInfo with the results.
430
  if (!Parser::ParseStatic(info->parse_info())) return MaybeHandle<Code>();
431 432
  DCHECK_EQ(info->shared_info()->language_mode(),
            info->literal()->language_mode());
433

434
  // Compile either unoptimized code or bytecode for the interpreter.
435
  if (!CompileUnoptimizedCode(info)) return MaybeHandle<Code>();
436

437
  InstallUnoptimizedCode(info);
438

439 440
  return info->code();
}
441

442 443 444 445 446 447 448 449 450 451
CompilationJob::Status FinalizeUnoptimizedCompilationJob(CompilationJob* job) {
  CompilationJob::Status status = job->FinalizeJob();
  if (status == CompilationJob::SUCCEEDED) {
    DCHECK(!job->info()->shared_info()->is_compiled());
    InstallUnoptimizedCode(job->info());
    job->RecordUnoptimizedCompilationStats();
  }
  return status;
}

452
MUST_USE_RESULT MaybeHandle<Code> GetCodeFromOptimizedCodeMap(
453
    Handle<JSFunction> function, BailoutId osr_ast_id) {
454 455 456 457 458 459 460 461 462 463
  Handle<SharedFunctionInfo> shared(function->shared());
  DisallowHeapAllocation no_gc;
  CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
      function->context()->native_context(), osr_ast_id);
  if (cached.code != nullptr) {
    // Caching of optimized code enabled and optimized code found.
    if (cached.literals != nullptr) function->set_literals(cached.literals);
    DCHECK(!cached.code->marked_for_deoptimization());
    DCHECK(function->shared()->is_compiled());
    return Handle<Code>(cached.code);
464 465 466 467
  }
  return MaybeHandle<Code>();
}

468
void InsertCodeIntoOptimizedCodeMap(CompilationInfo* info) {
469 470 471
  Handle<Code> code = info->code();
  if (code->kind() != Code::OPTIMIZED_FUNCTION) return;  // Nothing to do.

472 473
  // Function context specialization folds-in the function context,
  // so no sharing can occur.
474 475
  if (info->is_function_context_specializing()) return;
  // Frame specialization implies function context specialization.
476
  DCHECK(!info->is_frame_specializing());
477

478 479 480 481
  // TODO(4764): When compiling for OSR from bytecode, BailoutId might derive
  // from bytecode offset and overlap with actual BailoutId. No caching!
  if (info->is_osr() && info->is_optimizing_from_bytecode()) return;

482
  // Cache optimized context-specific code.
483
  Handle<JSFunction> function = info->closure();
484 485 486 487 488
  Handle<SharedFunctionInfo> shared(function->shared());
  Handle<LiteralsArray> literals(function->literals());
  Handle<Context> native_context(function->context()->native_context());
  SharedFunctionInfo::AddToOptimizedCodeMap(shared, native_context, code,
                                            literals, info->osr_ast_id());
489

490
  // Do not cache (native) context-independent code compiled for OSR.
491 492
  if (code->is_turbofanned() && info->is_osr()) return;

493 494 495
  // Cache optimized (native) context-independent code.
  if (FLAG_turbo_cache_shared_code && code->is_turbofanned() &&
      !info->is_native_context_specializing()) {
496
    DCHECK(!info->is_function_context_specializing());
497 498 499 500
    DCHECK(info->osr_ast_id().IsNone());
    Handle<SharedFunctionInfo> shared(function->shared());
    SharedFunctionInfo::AddSharedCodeToOptimizedCodeMap(shared, code);
  }
501 502
}

503
bool Renumber(ParseInfo* parse_info) {
504
  if (!AstNumbering::Renumber(parse_info->isolate(), parse_info->zone(),
505
                              parse_info->literal())) {
506 507
    return false;
  }
508 509
  Handle<SharedFunctionInfo> shared_info = parse_info->shared_info();
  if (!shared_info.is_null()) {
510
    FunctionLiteral* lit = parse_info->literal();
511
    shared_info->set_ast_node_count(lit->ast_node_count());
512 513 514
    if (lit->dont_optimize_reason() != kNoReason) {
      shared_info->DisableOptimization(lit->dont_optimize_reason());
    }
515 516 517
    if (lit->flags() & AstProperties::kDontCrankshaft) {
      shared_info->set_dont_crankshaft(true);
    }
518 519 520 521
  }
  return true;
}

522
bool UseTurboFan(Handle<SharedFunctionInfo> shared) {
523 524
  bool optimization_disabled = shared->optimization_disabled();
  bool dont_crankshaft = shared->dont_crankshaft();
525 526 527

  // Check the enabling conditions for Turbofan.
  // 1. "use asm" code.
528 529
  bool is_turbofanable_asm =
      FLAG_turbo_asm && shared->asm_function() && !optimization_disabled;
530 531 532 533 534 535 536

  // 2. Fallback for features unsupported by Crankshaft.
  bool is_unsupported_by_crankshaft_but_turbofanable =
      dont_crankshaft && strcmp(FLAG_turbo_filter, "~~") == 0 &&
      !optimization_disabled;

  // 3. Explicitly enabled by the command-line filter.
537
  bool passes_turbo_filter = shared->PassesFilter(FLAG_turbo_filter);
538

539 540
  return is_turbofanable_asm || is_unsupported_by_crankshaft_but_turbofanable ||
         passes_turbo_filter;
541 542
}

543
bool GetOptimizedCodeNow(CompilationJob* job) {
544
  CompilationInfo* info = job->info();
545
  Isolate* isolate = info->isolate();
546 547

  // Parsing is not required when optimizing from existing bytecode.
548
  if (!info->is_optimizing_from_bytecode()) {
549
    if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
550
    EnsureFeedbackMetadata(info);
551
  }
552

553 554
  JSFunction::EnsureLiterals(info->closure());

555
  TimerEventScope<TimerEventRecompileSynchronous> timer(isolate);
556 557
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::RecompileSynchronous);
558 559
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
               "V8.RecompileSynchronous");
560

561 562 563
  if (job->PrepareJob() != CompilationJob::SUCCEEDED ||
      job->ExecuteJob() != CompilationJob::SUCCEEDED ||
      job->FinalizeJob() != CompilationJob::SUCCEEDED) {
564 565 566 567 568 569 570
    if (FLAG_trace_opt) {
      PrintF("[aborted optimizing ");
      info->closure()->ShortPrint();
      PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
    }
    return false;
  }
571 572

  // Success!
573
  job->RecordOptimizedCompilationStats();
574
  DCHECK(!isolate->has_pending_exception());
575
  InsertCodeIntoOptimizedCodeMap(info);
576
  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
577 578 579
  return true;
}

580
bool GetOptimizedCodeLater(CompilationJob* job) {
581
  CompilationInfo* info = job->info();
582
  Isolate* isolate = info->isolate();
583

584
  if (!isolate->optimizing_compile_dispatcher()->IsQueueAvailable()) {
585 586
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** Compilation queue full, will retry optimizing ");
587
      info->closure()->ShortPrint();
588 589 590 591 592
      PrintF(" later.\n");
    }
    return false;
  }

593 594 595 596 597 598 599 600 601
  if (isolate->heap()->HighMemoryPressure()) {
    if (FLAG_trace_concurrent_recompilation) {
      PrintF("  ** High memory pressure, will retry optimizing ");
      info->closure()->ShortPrint();
      PrintF(" later.\n");
    }
    return false;
  }

602
  // Parsing is not required when optimizing from existing bytecode.
603
  if (!info->is_optimizing_from_bytecode()) {
604
    if (!Compiler::ParseAndAnalyze(info->parse_info())) return false;
605
    EnsureFeedbackMetadata(info);
606
  }
607

608 609
  JSFunction::EnsureLiterals(info->closure());

610
  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
611 612
  RuntimeCallTimerScope runtimeTimer(info->isolate(),
                                     &RuntimeCallStats::RecompileSynchronous);
613 614
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
               "V8.RecompileSynchronous");
615

616
  if (job->PrepareJob() != CompilationJob::SUCCEEDED) return false;
617
  isolate->optimizing_compile_dispatcher()->QueueForOptimization(job);
618 619 620

  if (FLAG_trace_concurrent_recompilation) {
    PrintF("  ** Queued ");
621
    info->closure()->ShortPrint();
622
    PrintF(" for concurrent optimization.\n");
623 624 625 626
  }
  return true;
}

627 628 629 630 631 632 633
MaybeHandle<Code> GetOptimizedCode(Handle<JSFunction> function,
                                   Compiler::ConcurrencyMode mode,
                                   BailoutId osr_ast_id = BailoutId::None(),
                                   JavaScriptFrame* osr_frame = nullptr) {
  Isolate* isolate = function->GetIsolate();
  Handle<SharedFunctionInfo> shared(function->shared(), isolate);

634 635 636 637 638 639
  bool ignition_osr = osr_frame && osr_frame->is_interpreted();
  DCHECK_IMPLIES(ignition_osr, !osr_ast_id.IsNone());
  DCHECK_IMPLIES(ignition_osr, FLAG_ignition_osr);

  // Flag combination --ignition-osr --no-turbo-from-bytecode is unsupported.
  if (ignition_osr && !FLAG_turbo_from_bytecode) return MaybeHandle<Code>();
640

641
  Handle<Code> cached_code;
642 643 644 645
  // TODO(4764): When compiling for OSR from bytecode, BailoutId might derive
  // from bytecode offset and overlap with actual BailoutId. No lookup!
  if (!ignition_osr &&
      GetCodeFromOptimizedCodeMap(function, osr_ast_id)
646 647 648 649 650 651 652 653 654 655 656 657
          .ToHandle(&cached_code)) {
    if (FLAG_trace_opt) {
      PrintF("[found optimized code for ");
      function->ShortPrint();
      if (!osr_ast_id.IsNone()) {
        PrintF(" at OSR AST id %d", osr_ast_id.ToInt());
      }
      PrintF("]\n");
    }
    return cached_code;
  }

658
  // Reset profiler ticks, function is no longer considered hot.
659 660
  if (shared->is_compiled()) {
    shared->code()->set_profiler_ticks(0);
661 662 663 664 665
  }

  VMState<COMPILER> state(isolate);
  DCHECK(!isolate->has_pending_exception());
  PostponeInterruptsScope postpone(isolate);
666
  bool use_turbofan = UseTurboFan(shared) || ignition_osr;
667
  std::unique_ptr<CompilationJob> job(
668 669 670
      use_turbofan ? compiler::Pipeline::NewCompilationJob(function)
                   : new HCompilationJob(function));
  CompilationInfo* info = job->info();
671
  ParseInfo* parse_info = info->parse_info();
672

673
  info->SetOptimizingForOsr(osr_ast_id, osr_frame);
674

675 676 677 678 679 680 681 682 683 684 685 686 687 688
  // Do not use Crankshaft/TurboFan if we need to be able to set break points.
  if (info->shared_info()->HasDebugInfo()) {
    info->AbortOptimization(kFunctionBeingDebugged);
    return MaybeHandle<Code>();
  }

  // Limit the number of times we try to optimize functions.
  const int kMaxOptCount =
      FLAG_deopt_every_n_times == 0 ? FLAG_max_opt_count : 1000;
  if (info->shared_info()->opt_count() > kMaxOptCount) {
    info->AbortOptimization(kOptimizedTooManyTimes);
    return MaybeHandle<Code>();
  }

689
  TimerEventScope<TimerEventOptimizeCode> optimize_code_timer(isolate);
690
  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::OptimizeCode);
691
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.OptimizeCode");
692

693
  // TurboFan can optimize directly from existing bytecode.
694 695 696 697 698
  if (FLAG_turbo_from_bytecode && use_turbofan && ShouldUseIgnition(info)) {
    if (!Compiler::EnsureBytecode(info)) {
      if (isolate->has_pending_exception()) isolate->clear_pending_exception();
      return MaybeHandle<Code>();
    }
699 700 701
    info->MarkAsOptimizeFromBytecode();
  }

702 703 704 705 706 707 708 709
  if (IsEvalToplevel(shared)) {
    parse_info->set_eval();
    if (function->context()->IsNativeContext()) parse_info->set_global();
    parse_info->set_toplevel();
    parse_info->set_allow_lazy_parsing(false);
    parse_info->set_lazy(false);
  }

710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725
  // In case of concurrent recompilation, all handles below this point will be
  // allocated in a deferred handle scope that is detached and handed off to
  // the background thread when we return.
  std::unique_ptr<CompilationHandleScope> compilation;
  if (mode == Compiler::CONCURRENT) {
    compilation.reset(new CompilationHandleScope(info));
  }

  // In case of TurboFan, all handles below will be canonicalized.
  std::unique_ptr<CanonicalHandleScope> canonical;
  if (use_turbofan) canonical.reset(new CanonicalHandleScope(info->isolate()));

  // Reopen handles in the new CompilationHandleScope.
  info->ReopenHandlesInNewHandleScope();
  parse_info->ReopenHandlesInNewHandleScope();

726
  if (mode == Compiler::CONCURRENT) {
727
    if (GetOptimizedCodeLater(job.get())) {
728
      job.release();  // The background recompile job owns this now.
729 730 731
      return isolate->builtins()->InOptimizationQueue();
    }
  } else {
732
    if (GetOptimizedCodeNow(job.get())) return info->code();
733 734 735 736 737 738
  }

  if (isolate->has_pending_exception()) isolate->clear_pending_exception();
  return MaybeHandle<Code>();
}

739 740 741 742 743 744 745
CompilationJob::Status FinalizeOptimizedCompilationJob(CompilationJob* job) {
  CompilationInfo* info = job->info();
  Isolate* isolate = info->isolate();

  TimerEventScope<TimerEventRecompileSynchronous> timer(info->isolate());
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::RecompileSynchronous);
746 747
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
               "V8.RecompileSynchronous");
748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792

  Handle<SharedFunctionInfo> shared = info->shared_info();
  shared->code()->set_profiler_ticks(0);

  DCHECK(!shared->HasDebugInfo());

  // 1) Optimization on the concurrent thread may have failed.
  // 2) The function may have already been optimized by OSR.  Simply continue.
  //    Except when OSR already disabled optimization for some reason.
  // 3) The code may have already been invalidated due to dependency change.
  // 4) Code generation may have failed.
  if (job->state() == CompilationJob::State::kReadyToFinalize) {
    if (shared->optimization_disabled()) {
      job->RetryOptimization(kOptimizationDisabled);
    } else if (info->dependencies()->HasAborted()) {
      job->RetryOptimization(kBailedOutDueToDependencyChange);
    } else if (job->FinalizeJob() == CompilationJob::SUCCEEDED) {
      job->RecordOptimizedCompilationStats();
      RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, info);
      if (shared
              ->SearchOptimizedCodeMap(info->context()->native_context(),
                                       info->osr_ast_id())
              .code == nullptr) {
        InsertCodeIntoOptimizedCodeMap(info);
      }
      if (FLAG_trace_opt) {
        PrintF("[completed optimizing ");
        info->closure()->ShortPrint();
        PrintF("]\n");
      }
      info->closure()->ReplaceCode(*info->code());
      return CompilationJob::SUCCEEDED;
    }
  }

  DCHECK(job->state() == CompilationJob::State::kFailed);
  if (FLAG_trace_opt) {
    PrintF("[aborted optimizing ");
    info->closure()->ShortPrint();
    PrintF(" because: %s]\n", GetBailoutReason(info->bailout_reason()));
  }
  info->closure()->ReplaceCode(shared->code());
  return CompilationJob::FAILED;
}

793 794
class InterpreterActivationsFinder : public ThreadVisitor,
                                     public OptimizedFunctionVisitor {
795 796 797 798 799
 public:
  explicit InterpreterActivationsFinder(SharedFunctionInfo* shared)
      : shared_(shared), has_activations_(false) {}

  void VisitThread(Isolate* isolate, ThreadLocalTop* top) {
800
    Address* activation_pc_address = nullptr;
801
    JavaScriptFrameIterator it(isolate, top);
802
    for (; !it.done(); it.Advance()) {
803
      JavaScriptFrame* frame = it.frame();
804 805 806 807 808 809 810 811
      if (FLAG_turbo_from_bytecode && FLAG_ignition_osr &&
          frame->is_optimized() && frame->function()->shared() == shared_) {
        // If we are able to optimize functions directly from bytecode, then
        // there might be optimized OSR code active on the stack that is not
        // reachable through a function. We count this as an activation.
        has_activations_ = true;
      }
      if (frame->is_interpreted() && frame->function()->shared() == shared_) {
812 813 814 815 816 817 818
        has_activations_ = true;
        activation_pc_address = frame->pc_address();
      }
    }

    if (activation_pc_address) {
      activation_pc_addresses_.push_back(activation_pc_address);
819 820
    }
  }
821 822 823 824 825 826 827

  void VisitFunction(JSFunction* function) {
    if (function->Inlines(shared_)) has_activations_ = true;
  }

  void EnterContext(Context* context) {}
  void LeaveContext(Context* context) {}
828 829 830 831 832 833 834 835 836 837 838 839 840 841 842 843 844 845 846 847

  bool MarkActivationsForBaselineOnReturn(Isolate* isolate) {
    if (activation_pc_addresses_.empty()) return false;

    for (Address* activation_pc_address : activation_pc_addresses_) {
      DCHECK(isolate->inner_pointer_to_code_cache()
                 ->GetCacheEntry(*activation_pc_address)
                 ->code->is_interpreter_trampoline_builtin());
      *activation_pc_address =
          isolate->builtins()->InterpreterMarkBaselineOnReturn()->entry();
    }
    return true;
  }

  bool has_activations() { return has_activations_; }

 private:
  SharedFunctionInfo* shared_;
  bool has_activations_;
  std::vector<Address*> activation_pc_addresses_;
848 849
};

// Runs the given finder over the current thread, all archived threads and
// (when optimizing from bytecode) all optimized functions; returns whether
// any activation of the function in question was found.
bool HasInterpreterActivations(
    Isolate* isolate, InterpreterActivationsFinder* activations_finder) {
  activations_finder->VisitThread(isolate, isolate->thread_local_top());
  isolate->thread_manager()->IterateArchivedThreads(activations_finder);
  if (FLAG_turbo_from_bytecode) {
    // If we are able to optimize functions directly from bytecode, then there
    // might be optimized functions that rely on bytecode being around. We need
    // to prevent switching the given function to baseline code in those cases.
    Deoptimizer::VisitAllOptimizedFunctions(isolate, activations_finder);
  }
  return activations_finder->has_activations();
}

// Compiles baseline (full-codegen) code for a function currently running
// through the interpreter. Returns an empty handle when switching to
// baseline is not possible (debugger attached, resumable function, or live
// interpreter activations that cannot be abandoned).
MaybeHandle<Code> GetBaselineCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  VMState<COMPILER> state(isolate);
  PostponeInterruptsScope postpone(isolate);
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, function);
  CompilationInfo info(&parse_info, function);

  // Reset profiler ticks, function is no longer considered hot.
  if (function->shared()->HasBytecodeArray()) {
    function->shared()->set_profiler_ticks(0);
  }

  // Nothing left to do if the function already has baseline code.
  if (function->shared()->code()->kind() == Code::FUNCTION) {
    return Handle<Code>(function->shared()->code());
  }

  // We do not switch to baseline code when the debugger might have created a
  // copy of the bytecode with break slots to be able to set break points.
  if (function->shared()->HasDebugInfo()) {
    return MaybeHandle<Code>();
  }

  // TODO(4280): For now we do not switch generators or async functions to
  // baseline code because there might be suspended activations stored in
  // generator objects on the heap. We could eventually go directly to
  // TurboFan in this case.
  if (function->shared()->is_resumable()) {
    return MaybeHandle<Code>();
  }

  // TODO(4280): For now we disable switching to baseline code in the presence
  // of interpreter activations of the given function. The reasons is that the
  // underlying bytecode is cleared below. Note that this only applies in case
  // the --ignition-preserve-bytecode flag is not passed.
  if (!FLAG_ignition_preserve_bytecode) {
    InterpreterActivationsFinder activations_finder(function->shared());
    if (HasInterpreterActivations(isolate, &activations_finder)) {
      if (FLAG_trace_opt) {
        OFStream os(stdout);
        os << "[unable to switch " << Brief(*function) << " due to activations]"
           << std::endl;
      }

      // Best effort: patch the live frames so that they tier up to baseline
      // code once the current activations return.
      if (activations_finder.MarkActivationsForBaselineOnReturn(isolate)) {
        if (FLAG_trace_opt) {
          OFStream os(stdout);
          os << "[marking " << Brief(function->shared())
             << " for baseline recompilation on return]" << std::endl;
        }
      }

      return MaybeHandle<Code>();
    }
  }

  if (FLAG_trace_opt) {
    OFStream os(stdout);
    os << "[switching method " << Brief(*function) << " to baseline code]"
       << std::endl;
  }

  // Parse and update CompilationInfo with the results.
  if (!Parser::ParseStatic(info.parse_info())) return MaybeHandle<Code>();
  Handle<SharedFunctionInfo> shared = info.shared_info();
  DCHECK_EQ(shared->language_mode(), info.literal()->language_mode());

  // Compile baseline code using the full code generator.
  if (!Compiler::Analyze(info.parse_info()) ||
      !FullCodeGenerator::MakeCode(&info)) {
    if (!isolate->has_pending_exception()) isolate->StackOverflow();
    return MaybeHandle<Code>();
  }

  // TODO(4280): For now we play it safe and remove the bytecode array when we
  // switch to baseline code. We might consider keeping around the bytecode so
  // that it can be used as the "source of truth" eventually. Note that this
  // only applies in case the --ignition-preserve-bytecode flag is not passed.
  if (!FLAG_ignition_preserve_bytecode) shared->ClearBytecodeArray();

  // Update the shared function info with the scope info.
  InstallSharedScopeInfo(&info, shared);

  // Install compilation result on the shared function info
  InstallSharedCompilationResult(&info, shared);

  // Record the function compilation event.
  RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG, &info);

  return info.code();
}

// Produces code for a not-yet-compiled function: reuses cached optimized or
// already-compiled shared code where possible, falls back to the interpreter
// entry trampoline when bytecode exists, and otherwise compiles unoptimized
// code (optionally tiering straight up under --always-opt).
MaybeHandle<Code> GetLazyCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(!isolate->has_pending_exception());
  DCHECK(!function->is_compiled());
  TimerEventScope<TimerEventCompileCode> compile_timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate,
                                     &RuntimeCallStats::CompileCodeLazy);
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileCode");
  AggregatedHistogramTimerScope timer(isolate->counters()->compile_lazy());

  if (FLAG_turbo_cache_shared_code) {
    // Optimized code may already exist in the optimized code map.
    Handle<Code> cached_code;
    if (GetCodeFromOptimizedCodeMap(function, BailoutId::None())
            .ToHandle(&cached_code)) {
      if (FLAG_trace_opt) {
        PrintF("[found optimized code for ");
        function->ShortPrint();
        PrintF(" during unoptimized compile]\n");
      }
      DCHECK(function->shared()->is_compiled());
      return cached_code;
    }
  }

  if (function->shared()->is_compiled()) {
    return Handle<Code>(function->shared()->code());
  }

  // Bytecode is present but no code object: enter through the interpreter.
  if (function->shared()->HasBytecodeArray()) {
    Handle<Code> entry = isolate->builtins()->InterpreterEntryTrampoline();
    function->shared()->ReplaceCode(*entry);
    return entry;
  }

  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, function);
  CompilationInfo info(&parse_info, function);
  Handle<Code> result;
  ASSIGN_RETURN_ON_EXCEPTION(isolate, result, GetUnoptimizedCode(&info), Code);

  if (FLAG_always_opt) {
    // Eagerly try to optimize; fall back to the unoptimized result on failure.
    Handle<Code> opt_code;
    if (GetOptimizedCode(function, Compiler::NOT_CONCURRENT)
            .ToHandle(&opt_code)) {
      result = opt_code;
    }
  }

  return result;
}


// Allocates a SharedFunctionInfo for the given function literal, initialized
// for lazy compilation (CompileLazy builtin, empty scope info), and attaches
// it to the given script.
Handle<SharedFunctionInfo> NewSharedFunctionInfoForLiteral(
    Isolate* isolate, FunctionLiteral* literal, Handle<Script> script) {
  Handle<Code> code = isolate->builtins()->CompileLazy();
  Handle<ScopeInfo> scope_info = handle(ScopeInfo::Empty(isolate));
  Handle<SharedFunctionInfo> result = isolate->factory()->NewSharedFunctionInfo(
      literal->name(), literal->materialized_literal_count(), literal->kind(),
      code, scope_info);
  SharedFunctionInfo::InitFromFunctionLiteral(result, literal);
  SharedFunctionInfo::SetScript(result, script);
  return result;
}

// Compiles a top-level script, eval or module: parses (if needed), allocates
// the top-level SharedFunctionInfo, compiles unoptimized code and records
// logging/profiling events. Returns a null handle on failure.
Handle<SharedFunctionInfo> CompileToplevel(CompilationInfo* info) {
  Isolate* isolate = info->isolate();
  TimerEventScope<TimerEventCompileCode> timer(isolate);
  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::CompileCode);
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileCode");
  PostponeInterruptsScope postpone(isolate);
  DCHECK(!isolate->native_context().is_null());
  ParseInfo* parse_info = info->parse_info();
  Handle<Script> script = parse_info->script();

  // Create a canonical handle scope before internalizing parsed values if
  // compiling bytecode. This is required for off-thread bytecode generation.
  std::unique_ptr<CanonicalHandleScope> canonical;
  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(isolate));

  // TODO(svenpanne) Obscure place for this, perhaps move to OnBeforeCompile?
  FixedArray* array = isolate->native_context()->embedder_data();
  script->set_context_data(array->get(v8::Context::kDebugIdIndex));

  isolate->debug()->OnBeforeCompile(script);

  DCHECK(parse_info->is_eval() || parse_info->is_global() ||
         parse_info->is_module());

  parse_info->set_toplevel();

  Handle<SharedFunctionInfo> result;

  { VMState<COMPILER> state(info->isolate());
    if (parse_info->literal() == NULL) {
      // Parse the script if needed (if it's already parsed, literal() is
      // non-NULL). If compiling for debugging, we may eagerly compile inner
      // functions, so do not parse lazily in that case.
      ScriptCompiler::CompileOptions options = parse_info->compile_options();
      bool parse_allow_lazy = (options == ScriptCompiler::kConsumeParserCache ||
                               String::cast(script->source())->length() >
                                   FLAG_min_preparse_length) &&
                              !info->is_debug();

      // Consider parsing eagerly when targeting the code cache.
      parse_allow_lazy &= !(FLAG_serialize_eager && info->will_serialize());

      // Consider parsing eagerly when targeting Ignition.
      parse_allow_lazy &= !(FLAG_ignition && FLAG_ignition_eager &&
                            !isolate->serializer_enabled());

      parse_info->set_allow_lazy_parsing(parse_allow_lazy);
      if (!parse_allow_lazy &&
          (options == ScriptCompiler::kProduceParserCache ||
           options == ScriptCompiler::kConsumeParserCache)) {
        // We are going to parse eagerly, but we either 1) have cached data
        // produced by lazy parsing or 2) are asked to generate cached data.
        // Eager parsing cannot benefit from cached data, and producing cached
        // data while parsing eagerly is not implemented.
        parse_info->set_cached_data(nullptr);
        parse_info->set_compile_options(ScriptCompiler::kNoCompileOptions);
      }

      if (!Parser::ParseStatic(parse_info)) {
        return Handle<SharedFunctionInfo>::null();
      }
    }

    DCHECK(!info->is_debug() || !parse_info->allow_lazy_parsing());

    FunctionLiteral* lit = parse_info->literal();

    // Measure how long it takes to do the compilation; only take the
    // rest of the function into account to avoid overlap with the
    // parsing statistics.
    RuntimeCallTimerScope runtimeTimer(
        isolate, parse_info->is_eval() ? &RuntimeCallStats::CompileEval
                                       : &RuntimeCallStats::Compile);
    HistogramTimer* rate = parse_info->is_eval()
                               ? info->isolate()->counters()->compile_eval()
                               : info->isolate()->counters()->compile();
    HistogramTimerScope timer(rate);
    TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
                 parse_info->is_eval() ? "V8.CompileEval" : "V8.Compile");

    // Allocate a shared function info object.
    DCHECK_EQ(kNoSourcePosition, lit->function_token_position());
    result = NewSharedFunctionInfoForLiteral(isolate, lit, script);
    result->set_is_toplevel(true);
    if (parse_info->is_eval()) {
      // Eval scripts cannot be (re-)compiled without context.
      result->set_allows_lazy_compilation_without_context(false);
    }
    parse_info->set_shared_info(result);

    // Compile the code.
    if (!CompileUnoptimizedCode(info)) {
      return Handle<SharedFunctionInfo>::null();
    }

    // Update the shared function info with the scope info.
    InstallSharedScopeInfo(info, result);

    // Install compilation result on the shared function info
    InstallSharedCompilationResult(info, result);

    Handle<String> script_name =
        script->name()->IsString()
            ? Handle<String>(String::cast(script->name()))
            : isolate->factory()->empty_string();
    CodeEventListener::LogEventsAndTags log_tag =
        parse_info->is_eval()
            ? CodeEventListener::EVAL_TAG
            : Logger::ToNativeByScript(CodeEventListener::SCRIPT_TAG, *script);

    PROFILE(isolate, CodeCreateEvent(log_tag, result->abstract_code(), *result,
                                     *script_name));

    if (!script.is_null())
      script->set_compilation_state(Script::COMPILATION_STATE_COMPILED);
  }

  return result;
}

}  // namespace

// ----------------------------------------------------------------------------
// Implementation of Compiler

// Runs the post-parse analysis passes (AST rewriting, scope analysis and AST
// numbering) on an already-parsed ParseInfo. Returns false on failure.
bool Compiler::Analyze(ParseInfo* info) {
  DCHECK_NOT_NULL(info->literal());
  if (!Rewriter::Rewrite(info)) return false;
  DeclarationScope::Analyze(info, AnalyzeMode::kRegular);
  if (!Renumber(info)) return false;
  DCHECK_NOT_NULL(info->scope());
  return true;
}

// Parses the given ParseInfo and then runs the analysis passes on it.
// Returns false if either step fails.
bool Compiler::ParseAndAnalyze(ParseInfo* info) {
  if (!Parser::ParseStatic(info)) return false;
  if (!Compiler::Analyze(info)) return false;
  DCHECK_NOT_NULL(info->literal());
  DCHECK_NOT_NULL(info->scope());
  return true;
}

// Ensures the given function is compiled, compiling lazily if necessary and
// installing the resulting code on the closure. When compilation fails,
// returns false and — depending on {flag} — clears the pending exception.
bool Compiler::Compile(Handle<JSFunction> function, ClearExceptionFlag flag) {
  if (function->is_compiled()) return true;
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Handle<Code> code;
  if (!GetLazyCode(function).ToHandle(&code)) {
    if (flag == CLEAR_EXCEPTION) {
      isolate->clear_pending_exception();
    }
    return false;
  }

  // Install code on closure.
  function->ReplaceCode(*code);
  JSFunction::EnsureLiterals(function);

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->is_compiled());
  return true;
}

// Tiers the given function up to baseline (full-codegen) code. If baseline
// generation is not possible, keeps the existing unoptimized code instead.
// Always returns true once code is installed on the closure.
bool Compiler::CompileBaseline(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Handle<Code> code;
  if (!GetBaselineCode(function).ToHandle(&code)) {
    // Baseline generation failed, get unoptimized code.
    DCHECK(function->shared()->is_compiled());
    code = handle(function->shared()->code());
    isolate->clear_pending_exception();
  }

  // Install code on closure.
  function->ReplaceCode(*code);
  JSFunction::EnsureLiterals(function);

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->is_compiled());
  return true;
}

// Attempts to optimize the given function (concurrently or not, per {mode}).
// On optimization failure falls back, in order, to existing shared code, the
// interpreter entry trampoline (when bytecode exists), or freshly compiled
// unoptimized code. Returns false only when that last fallback also fails.
bool Compiler::CompileOptimized(Handle<JSFunction> function,
                                ConcurrencyMode mode) {
  if (function->IsOptimized()) return true;
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Handle<Code> code;
  if (!GetOptimizedCode(function, mode).ToHandle(&code)) {
    // Optimization failed, get unoptimized code.
    DCHECK(!isolate->has_pending_exception());
    if (function->shared()->is_compiled()) {
      code = handle(function->shared()->code(), isolate);
    } else if (function->shared()->HasBytecodeArray()) {
      code = isolate->builtins()->InterpreterEntryTrampoline();
      function->shared()->ReplaceCode(*code);
    } else {
      Zone zone(isolate->allocator());
      ParseInfo parse_info(&zone, function);
      CompilationInfo info(&parse_info, function);
      if (!GetUnoptimizedCode(&info).ToHandle(&code)) {
        return false;
      }
    }
  }

  // Install code on closure.
  function->ReplaceCode(*code);
  JSFunction::EnsureLiterals(function);

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->is_compiled());
  return true;
}

// Compiles debug (breakpoint-capable) code for the given function, handling
// the eval-toplevel case specially. Returns false and clears the pending
// exception on failure.
bool Compiler::CompileDebugCode(Handle<JSFunction> function) {
  Isolate* isolate = function->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, function);
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
  if (IsEvalToplevel(handle(function->shared()))) {
    // Top-level eval must be recompiled eagerly as a whole.
    parse_info.set_eval();
    if (function->context()->IsNativeContext()) parse_info.set_global();
    parse_info.set_toplevel();
    parse_info.set_allow_lazy_parsing(false);
    parse_info.set_lazy(false);
  }
  info.MarkAsDebug();
  if (GetUnoptimizedCode(&info).is_null()) {
    isolate->clear_pending_exception();
    return false;
  }

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(function->shared()->is_compiled());
  DCHECK(function->shared()->HasDebugCode());
  return true;
}

// Compiles debug (breakpoint-capable) code for the given shared function
// info; requires the function to be compilable without a context. Returns
// false and clears the pending exception on failure.
bool Compiler::CompileDebugCode(Handle<SharedFunctionInfo> shared) {
  Isolate* isolate = shared->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // Start a compilation.
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, shared);
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
  DCHECK(shared->allows_lazy_compilation_without_context());
  DCHECK(!IsEvalToplevel(shared));
  info.MarkAsDebug();
  if (GetUnoptimizedCode(&info).is_null()) {
    isolate->clear_pending_exception();
    return false;
  }

  // Check postconditions on success.
  DCHECK(!isolate->has_pending_exception());
  DCHECK(shared->is_compiled());
  DCHECK(shared->HasDebugCode());
  return true;
}

// Recompiles a whole script in debug mode for live edit and collects the
// resulting function info array. Returns an empty handle on failure.
MaybeHandle<JSArray> Compiler::CompileForLiveEdit(Handle<Script> script) {
  Isolate* isolate = script->GetIsolate();
  DCHECK(AllowCompilation::IsAllowed(isolate));

  // In order to ensure that live edit function info collection finds the newly
  // generated shared function infos, clear the script's list temporarily
  // and restore it at the end of this method.
  Handle<Object> old_function_infos(script->shared_function_infos(), isolate);
  script->set_shared_function_infos(Smi::FromInt(0));

  // Start a compilation.
  Zone zone(isolate->allocator());
  ParseInfo parse_info(&zone, script);
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
  parse_info.set_global();
  info.MarkAsDebug();

  // TODO(635): support extensions.
  const bool compilation_succeeded = !CompileToplevel(&info).is_null();
  Handle<JSArray> infos;
  if (compilation_succeeded) {
    // Check postconditions on success.
    DCHECK(!isolate->has_pending_exception());
    infos = LiveEditFunctionTracker::Collect(parse_info.literal(), script,
                                             &zone, isolate);
  }

  // Restore the original function info list in order to remain side-effect
  // free as much as possible, since some code expects the old shared function
  // infos to stick around.
  script->set_shared_function_infos(*old_function_infos);

  return infos;
}

// Ensures the function described by {info} has a bytecode array, compiling
// unoptimized code if needed. Returns false when compilation fails or the
// function turned out to be asm.js-to-wasm translated code.
bool Compiler::EnsureBytecode(CompilationInfo* info) {
  DCHECK(ShouldUseIgnition(info));
  if (!info->shared_info()->HasBytecodeArray()) {
    Handle<Code> original_code(info->shared_info()->code());
    if (GetUnoptimizedCode(info).is_null()) return false;
    if (info->shared_info()->HasAsmWasmData()) return false;
    DCHECK(info->shared_info()->is_compiled());
    if (original_code->kind() == Code::FUNCTION) {
      // Generating bytecode will install the {InterpreterEntryTrampoline} as
      // shared code on the function. To avoid an implicit tier down we restore
      // original baseline code in case it existed beforehand.
      info->shared_info()->ReplaceCode(*original_code);
    }
  }
  DCHECK(info->shared_info()->HasBytecodeArray());
  return true;
}

// TODO(turbofan): In the future, unoptimized code with deopt support could
// be generated lazily once deopt is triggered.
//
// Ensures the shared function info behind {info} has unoptimized code with
// deoptimization support, recompiling with full-codegen if necessary.
// Returns false when such code cannot be produced (resumable functions, or
// live interpreter activations whose bytecode cannot be cleared).
bool Compiler::EnsureDeoptimizationSupport(CompilationInfo* info) {
  DCHECK_NOT_NULL(info->literal());
  DCHECK_NOT_NULL(info->scope());
  Handle<SharedFunctionInfo> shared = info->shared_info();
  if (!shared->has_deoptimization_support()) {
    Zone zone(info->isolate()->allocator());
    CompilationInfo unoptimized(info->parse_info(), info->closure());
    unoptimized.EnableDeoptimizationSupport();

    // TODO(4280): For now we do not switch generators or async functions to
    // baseline code because there might be suspended activations stored in
    // generator objects on the heap. We could eventually go directly to
    // TurboFan in this case.
    if (shared->is_resumable()) return false;

    // TODO(4280): For now we disable switching to baseline code in the presence
    // of interpreter activations of the given function. The reasons is that the
    // underlying bytecode is cleared below. The expensive check for activations
    // only needs to be done when the given function has bytecode, otherwise we
    // can be sure there are no activations. Note that this only applies in case
    // the --ignition-preserve-bytecode flag is not passed.
    if (!FLAG_ignition_preserve_bytecode && shared->HasBytecodeArray()) {
      InterpreterActivationsFinder activations_finder(*shared);
      if (HasInterpreterActivations(info->isolate(), &activations_finder)) {
        return false;
      }
    }

    // If the current code has reloc info for serialization, also include
    // reloc info for serialization for the new code, so that deopt support
    // can be added without losing IC state.
    if (shared->code()->kind() == Code::FUNCTION &&
        shared->code()->has_reloc_info_for_serialization()) {
      unoptimized.PrepareForSerializing();
    }
    EnsureFeedbackMetadata(&unoptimized);
    if (!FullCodeGenerator::MakeCode(&unoptimized)) return false;

    // TODO(4280): For now we play it safe and remove the bytecode array when we
    // switch to baseline code. We might consider keeping around the bytecode so
    // that it can be used as the "source of truth" eventually. Note that this
    // only applies in case the --ignition-preserve-bytecode flag is not passed.
    if (!FLAG_ignition_preserve_bytecode && shared->HasBytecodeArray()) {
      shared->ClearBytecodeArray();
    }

    // The scope info might not have been set if a lazily compiled
    // function is inlined before being called for the first time.
    if (shared->scope_info() == ScopeInfo::Empty(info->isolate())) {
      InstallSharedScopeInfo(info, shared);
    }

    // Install compilation result on the shared function info
    shared->EnableDeoptimizationSupport(*unoptimized.code());

    // The existing unoptimized code was replaced with the new one.
    RecordFunctionCompilation(CodeEventListener::LAZY_COMPILE_TAG,
                              &unoptimized);
  }
  return true;
}

// static
// Determines which tier the given function should be compiled to next:
// functions currently running through the interpreter trampoline go to
// BASELINE unless they can be optimized straight from bytecode; everything
// else goes to OPTIMIZED.
Compiler::CompilationTier Compiler::NextCompilationTier(JSFunction* function) {
  Handle<SharedFunctionInfo> shared(function->shared(), function->GetIsolate());
  if (!shared->code()->is_interpreter_trampoline_builtin()) return OPTIMIZED;
  const bool optimize_from_bytecode =
      FLAG_turbo_from_bytecode && UseTurboFan(shared);
  return optimize_from_bytecode ? OPTIMIZED : BASELINE;
}

// Compiles an eval source string into a JSFunction in the given context,
// consulting and populating the compilation cache. Returns an empty handle
// when top-level compilation fails.
MaybeHandle<JSFunction> Compiler::GetFunctionFromEval(
    Handle<String> source, Handle<SharedFunctionInfo> outer_info,
    Handle<Context> context, LanguageMode language_mode,
    ParseRestriction restriction, int eval_scope_position, int eval_position,
    int line_offset, int column_offset, Handle<Object> script_name,
    ScriptOriginOptions options) {
  Isolate* isolate = source->GetIsolate();
  int source_length = source->length();
  isolate->counters()->total_eval_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

  CompilationCache* compilation_cache = isolate->compilation_cache();
  MaybeHandle<SharedFunctionInfo> maybe_shared_info =
      compilation_cache->LookupEval(source, outer_info, context, language_mode,
                                    eval_scope_position);
  Handle<SharedFunctionInfo> shared_info;

  Handle<Script> script;
  if (!maybe_shared_info.ToHandle(&shared_info)) {
    // Cache miss: compile the eval source as a new script.
    script = isolate->factory()->NewScript(source);
    if (!script_name.is_null()) {
      script->set_name(*script_name);
      script->set_line_offset(line_offset);
      script->set_column_offset(column_offset);
    }
    script->set_origin_options(options);
    script->set_compilation_type(Script::COMPILATION_TYPE_EVAL);
    Script::SetEvalOrigin(script, outer_info, eval_position);

    Zone zone(isolate->allocator());
    ParseInfo parse_info(&zone, script);
    CompilationInfo info(&parse_info, Handle<JSFunction>::null());
    parse_info.set_eval();
    if (context->IsNativeContext()) parse_info.set_global();
    parse_info.set_language_mode(language_mode);
    parse_info.set_parse_restriction(restriction);
    parse_info.set_context(context);

    shared_info = CompileToplevel(&info);

    if (shared_info.is_null()) {
      return MaybeHandle<JSFunction>();
    } else {
      // If caller is strict mode, the result must be in strict mode as well.
      DCHECK(is_sloppy(language_mode) ||
             is_strict(shared_info->language_mode()));
      compilation_cache->PutEval(source, outer_info, context, shared_info,
                                 eval_scope_position);
    }
  }

  Handle<JSFunction> result =
      isolate->factory()->NewFunctionFromSharedFunctionInfo(
          shared_info, context, NOT_TENURED);

  // OnAfterCompile has to be called after we create the JSFunction, which we
  // may require to recompile the eval for debugging, if we find a function
  // that contains break points in the eval script.
  isolate->debug()->OnAfterCompile(script);

  return result;
}

namespace {

// Asks the embedder-installed callback whether code generation from strings
// (eval / new Function) should be allowed in the given context. Only invoked
// when the context itself has it disabled; with no callback installed, the
// answer stays "disallowed".
bool CodeGenerationFromStringsAllowed(Isolate* isolate,
                                      Handle<Context> context) {
  DCHECK(context->allow_code_gen_from_strings()->IsFalse(isolate));
  // Check with callback if set.
  AllowCodeGenerationFromStringsCallback callback =
      isolate->allow_code_gen_callback();
  if (callback == NULL) return false;  // No callback: remains disallowed.
  // Callback set. Let it decide if code generation is allowed.
  VMState<EXTERNAL> state(isolate);
  return callback(v8::Utils::ToLocal(context));
}

}  // namespace

// Compiles |source| (e.g. for the Function constructor) in the native context
// of |context|, throwing an EvalError if code generation from strings is not
// allowed there.
MaybeHandle<JSFunction> Compiler::GetFunctionFromString(
    Handle<Context> context, Handle<String> source,
    ParseRestriction restriction) {
  Isolate* const isolate = context->GetIsolate();
  Handle<Context> native_context(context->native_context(), isolate);

  // Code generation is permitted either by the context itself or, failing
  // that, by the embedder callback; otherwise raise an EvalError.
  bool allowed =
      !native_context->allow_code_gen_from_strings()->IsFalse(isolate) ||
      CodeGenerationFromStringsAllowed(isolate, native_context);
  if (!allowed) {
    Handle<Object> error_message =
        native_context->ErrorMessageForCodeGenerationFromStrings();
    THROW_NEW_ERROR(isolate, NewEvalError(MessageTemplate::kCodeGenFromStrings,
                                          error_message),
                    JSFunction);
  }

  // Compile the source string in the native context, in sloppy mode.
  int eval_scope_position = 0;
  int eval_position = kNoSourcePosition;
  Handle<SharedFunctionInfo> outer_info(native_context->closure()->shared());
  return Compiler::GetFunctionFromEval(source, outer_info, native_context,
                                       SLOPPY, restriction, eval_scope_position,
                                       eval_position);
}

1539
Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForScript(
1540
    Handle<String> source, Handle<Object> script_name, int line_offset,
1541 1542 1543
    int column_offset, ScriptOriginOptions resource_options,
    Handle<Object> source_map_url, Handle<Context> context,
    v8::Extension* extension, ScriptData** cached_data,
1544 1545
    ScriptCompiler::CompileOptions compile_options, NativesFlag natives,
    bool is_module) {
1546
  Isolate* isolate = source->GetIsolate();
1547
  if (compile_options == ScriptCompiler::kNoCompileOptions) {
1548
    cached_data = NULL;
1549 1550
  } else if (compile_options == ScriptCompiler::kProduceParserCache ||
             compile_options == ScriptCompiler::kProduceCodeCache) {
1551 1552
    DCHECK(cached_data && !*cached_data);
    DCHECK(extension == NULL);
1553
    DCHECK(!isolate->debug()->is_loaded());
1554
  } else {
1555
    DCHECK(compile_options == ScriptCompiler::kConsumeParserCache ||
1556
           compile_options == ScriptCompiler::kConsumeCodeCache);
1557 1558
    DCHECK(cached_data && *cached_data);
    DCHECK(extension == NULL);
1559
  }
1560 1561 1562 1563
  int source_length = source->length();
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

1564
  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
1565 1566 1567
  CompilationCache* compilation_cache = isolate->compilation_cache();

  // Do a lookup in the compilation cache but not for extensions.
1568
  MaybeHandle<SharedFunctionInfo> maybe_result;
1569
  Handle<SharedFunctionInfo> result;
1570
  if (extension == NULL) {
1571
    // First check per-isolate compilation cache.
1572
    maybe_result = compilation_cache->LookupScript(
1573 1574
        source, script_name, line_offset, column_offset, resource_options,
        context, language_mode);
1575
    if (maybe_result.is_null() && FLAG_serialize_toplevel &&
1576 1577
        compile_options == ScriptCompiler::kConsumeCodeCache &&
        !isolate->debug()->is_loaded()) {
1578
      // Then check cached code provided by embedder.
1579
      HistogramTimerScope timer(isolate->counters()->compile_deserialize());
1580 1581
      RuntimeCallTimerScope runtimeTimer(isolate,
                                         &RuntimeCallStats::CompileDeserialize);
1582 1583
      TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
                   "V8.CompileDeserialize");
1584 1585 1586
      Handle<SharedFunctionInfo> result;
      if (CodeSerializer::Deserialize(isolate, *cached_data, source)
              .ToHandle(&result)) {
1587 1588
        // Promote to per-isolate compilation cache.
        compilation_cache->PutScript(source, context, language_mode, result);
1589 1590 1591
        return result;
      }
      // Deserializer failed. Fall through to compile.
1592
    }
1593 1594
  }

1595 1596 1597 1598 1599 1600
  base::ElapsedTimer timer;
  if (FLAG_profile_deserialization && FLAG_serialize_toplevel &&
      compile_options == ScriptCompiler::kProduceCodeCache) {
    timer.Start();
  }

1601 1602 1603 1604
  if (!maybe_result.ToHandle(&result) ||
      (FLAG_serialize_toplevel &&
       compile_options == ScriptCompiler::kProduceCodeCache)) {
    // No cache entry found, or embedder wants a code cache. Compile the script.
1605 1606

    // Create a script object describing the script to be compiled.
1607
    Handle<Script> script = isolate->factory()->NewScript(source);
1608
    if (natives == NATIVES_CODE) {
1609
      script->set_type(Script::TYPE_NATIVE);
1610
      script->set_hide_source(true);
1611 1612 1613
    } else if (natives == EXTENSION_CODE) {
      script->set_type(Script::TYPE_EXTENSION);
      script->set_hide_source(true);
1614
    }
1615 1616
    if (!script_name.is_null()) {
      script->set_name(*script_name);
1617 1618
      script->set_line_offset(line_offset);
      script->set_column_offset(column_offset);
1619
    }
1620
    script->set_origin_options(resource_options);
1621 1622 1623
    if (!source_map_url.is_null()) {
      script->set_source_mapping_url(*source_map_url);
    }
1624 1625

    // Compile the function and add it to the cache.
1626
    Zone zone(isolate->allocator());
1627
    ParseInfo parse_info(&zone, script);
1628
    CompilationInfo info(&parse_info, Handle<JSFunction>::null());
1629
    if (is_module) {
1630
      parse_info.set_module();
1631
    } else {
1632
      parse_info.set_global();
1633
    }
1634
    if (compile_options != ScriptCompiler::kNoCompileOptions) {
1635
      parse_info.set_cached_data(cached_data);
1636
    }
1637 1638 1639
    parse_info.set_compile_options(compile_options);
    parse_info.set_extension(extension);
    parse_info.set_context(context);
1640 1641
    if (FLAG_serialize_toplevel &&
        compile_options == ScriptCompiler::kProduceCodeCache) {
1642 1643
      info.PrepareForSerializing();
    }
1644

1645
    parse_info.set_language_mode(
1646
        static_cast<LanguageMode>(parse_info.language_mode() | language_mode));
1647
    result = CompileToplevel(&info);
1648
    if (extension == NULL && !result.is_null()) {
1649
      compilation_cache->PutScript(source, context, language_mode, result);
1650
      if (FLAG_serialize_toplevel &&
1651
          compile_options == ScriptCompiler::kProduceCodeCache) {
1652 1653
        HistogramTimerScope histogram_timer(
            isolate->counters()->compile_serialize());
1654 1655
        RuntimeCallTimerScope runtimeTimer(isolate,
                                           &RuntimeCallStats::CompileSerialize);
1656 1657
        TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"),
                     "V8.CompileSerialize");
1658
        *cached_data = CodeSerializer::Serialize(isolate, result, source);
1659
        if (FLAG_profile_deserialization) {
1660 1661
          PrintF("[Compiling and serializing took %0.3f ms]\n",
                 timer.Elapsed().InMillisecondsF());
1662
        }
1663
      }
1664
    }
1665

1666 1667 1668 1669 1670
    if (result.is_null()) {
      isolate->ReportPendingMessages();
    } else {
      isolate->debug()->OnAfterCompile(script);
    }
1671
  } else if (result->ic_age() != isolate->heap()->global_ic_age()) {
1672
    result->ResetForNewContext(isolate->heap()->global_ic_age());
1673 1674 1675 1676
  }
  return result;
}

1677
Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForStreamedScript(
1678 1679 1680
    Handle<Script> script, ParseInfo* parse_info, int source_length) {
  Isolate* isolate = script->GetIsolate();
  // TODO(titzer): increment the counters in caller.
1681 1682 1683
  isolate->counters()->total_load_size()->Increment(source_length);
  isolate->counters()->total_compile_size()->Increment(source_length);

1684
  LanguageMode language_mode = construct_language_mode(FLAG_use_strict);
1685 1686
  parse_info->set_language_mode(
      static_cast<LanguageMode>(parse_info->language_mode() | language_mode));
1687

1688
  CompilationInfo compile_info(parse_info, Handle<JSFunction>::null());
1689

1690 1691
  // The source was parsed lazily, so compiling for debugging is not possible.
  DCHECK(!compile_info.is_debug());
1692

1693 1694 1695
  Handle<SharedFunctionInfo> result = CompileToplevel(&compile_info);
  if (!result.is_null()) isolate->debug()->OnAfterCompile(script);
  return result;
1696 1697 1698
}


1699
Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfo(
1700 1701
    FunctionLiteral* literal, Handle<Script> script,
    CompilationInfo* outer_info) {
1702
  // Precondition: code has been parsed and scopes have been analyzed.
1703
  Isolate* isolate = outer_info->isolate();
1704
  MaybeHandle<SharedFunctionInfo> maybe_existing;
1705 1706

  // Find any previously allocated shared function info for the given literal.
1707
  if (outer_info->shared_info()->never_compiled()) {
1708
    // On the first compile, there are no existing shared function info for
1709 1710
    // inner functions yet, so do not try to find them. All bets are off for
    // live edit though.
1711 1712
    SLOW_DCHECK(script->FindSharedFunctionInfo(literal).is_null() ||
                isolate->debug()->live_edit_enabled());
1713 1714 1715
  } else {
    maybe_existing = script->FindSharedFunctionInfo(literal);
  }
1716

1717 1718 1719 1720 1721
  // We found an existing shared function info. If it has any sort of code
  // attached, don't worry about compiling and simply return it. Otherwise,
  // continue to decide whether to eagerly compile.
  // Note that we also carry on if we are compiling eager to obtain code for
  // debugging, unless we already have code with debug break slots.
1722
  Handle<SharedFunctionInfo> existing;
1723
  if (maybe_existing.ToHandle(&existing)) {
1724
    DCHECK(!existing->is_toplevel());
1725 1726 1727 1728
    if (existing->HasBaselineCode() || existing->HasBytecodeArray()) {
      if (!outer_info->is_debug() || existing->HasDebugCode()) {
        return existing;
      }
1729
    }
1730 1731
  }

1732 1733 1734 1735 1736
  // Allocate a shared function info object.
  Handle<SharedFunctionInfo> result;
  if (!maybe_existing.ToHandle(&result)) {
    result = NewSharedFunctionInfoForLiteral(isolate, literal, script);
    result->set_is_toplevel(false);
1737 1738 1739 1740

    // If the outer function has been compiled before, we cannot be sure that
    // shared function info for this function literal has been created for the
    // first time. It may have already been compiled previously.
1741
    result->set_never_compiled(outer_info->shared_info()->never_compiled());
1742 1743
  }

1744
  Zone zone(isolate->allocator());
1745
  ParseInfo parse_info(&zone, script);
1746
  CompilationInfo info(&parse_info, Handle<JSFunction>::null());
1747
  parse_info.set_literal(literal);
1748
  parse_info.set_shared_info(result);
1749
  parse_info.set_language_mode(literal->scope()->language_mode());
1750
  if (outer_info->will_serialize()) info.PrepareForSerializing();
1751
  if (outer_info->is_debug()) info.MarkAsDebug();
1752

1753 1754 1755 1756 1757 1758 1759 1760 1761
  // Determine if the function can be lazily compiled. This is necessary to
  // allow some of our builtin JS files to be lazily compiled. These
  // builtins cannot be handled lazily by the parser, since we have to know
  // if a function uses the special natives syntax, which is something the
  // parser records.
  // If the debugger requests compilation for break points, we cannot be
  // aggressive about lazy compilation, because it might trigger compilation
  // of functions without an outer context when setting a breakpoint through
  // Debug::FindSharedFunctionInfoInScript.
1762
  bool allow_lazy = literal->AllowsLazyCompilation() && !info.is_debug();
1763 1764
  bool lazy = FLAG_lazy && allow_lazy && !literal->should_eager_compile();

1765 1766 1767
  // Consider compiling eagerly when targeting the code cache.
  lazy &= !(FLAG_serialize_eager && info.will_serialize());

1768 1769 1770 1771
  // Consider compiling eagerly when compiling bytecode for Ignition.
  lazy &=
      !(FLAG_ignition && FLAG_ignition_eager && !isolate->serializer_enabled());

1772
  // Generate code
1773
  TimerEventScope<TimerEventCompileCode> timer(isolate);
1774
  RuntimeCallTimerScope runtimeTimer(isolate, &RuntimeCallStats::CompileCode);
1775
  TRACE_EVENT0(TRACE_DISABLED_BY_DEFAULT("v8.compile"), "V8.CompileCode");
1776 1777 1778 1779 1780 1781 1782

  // Create a canonical handle scope if compiling ignition bytecode. This is
  // required by the constant array builder to de-duplicate common objects
  // without dereferencing handles.
  std::unique_ptr<CanonicalHandleScope> canonical;
  if (FLAG_ignition) canonical.reset(new CanonicalHandleScope(info.isolate()));

1783
  if (lazy) {
1784
    info.SetCode(isolate->builtins()->CompileLazy());
1785
  } else if (Renumber(info.parse_info()) && GenerateUnoptimizedCode(&info)) {
1786
    // Code generation will ensure that the feedback vector is present and
1787
    // appropriately sized.
1788
    DCHECK(!info.code().is_null());
1789 1790 1791 1792
    if (literal->should_eager_compile() &&
        literal->should_be_used_once_hint()) {
      info.code()->MarkToBeExecutedOnce(isolate);
    }
1793 1794
    // Update the shared function info with the scope info.
    InstallSharedScopeInfo(&info, result);
1795
    // Install compilation result on the shared function info.
1796
    InstallSharedCompilationResult(&info, result);
1797
  } else {
1798
    return Handle<SharedFunctionInfo>::null();
1799
  }
1800

1801
  if (maybe_existing.is_null()) {
1802
    RecordFunctionCompilation(CodeEventListener::FUNCTION_TAG, &info);
1803 1804 1805
  }

  return result;
1806 1807
}

1808 1809 1810 1811 1812 1813 1814 1815 1816 1817 1818 1819 1820 1821 1822 1823 1824 1825
Handle<SharedFunctionInfo> Compiler::GetSharedFunctionInfoForNative(
    v8::Extension* extension, Handle<String> name) {
  Isolate* isolate = name->GetIsolate();
  v8::Isolate* v8_isolate = reinterpret_cast<v8::Isolate*>(isolate);

  // Compute the function template for the native function.
  v8::Local<v8::FunctionTemplate> fun_template =
      extension->GetNativeFunctionTemplate(v8_isolate,
                                           v8::Utils::ToLocal(name));
  DCHECK(!fun_template.IsEmpty());

  // Instantiate the function and create a shared function info from it.
  Handle<JSFunction> fun = Handle<JSFunction>::cast(Utils::OpenHandle(
      *fun_template->GetFunction(v8_isolate->GetCurrentContext())
           .ToLocalChecked()));
  Handle<Code> code = Handle<Code>(fun->shared()->code());
  Handle<Code> construct_stub = Handle<Code>(fun->shared()->construct_stub());
  Handle<SharedFunctionInfo> shared = isolate->factory()->NewSharedFunctionInfo(
1826
      name, fun->shared()->num_literals(), FunctionKind::kNormalFunction, code,
1827
      Handle<ScopeInfo>(fun->shared()->scope_info()));
1828
  shared->SetConstructStub(*construct_stub);
1829
  shared->set_feedback_metadata(fun->shared()->feedback_metadata());
1830 1831 1832 1833 1834 1835 1836 1837

  // Copy the function data to the shared function info.
  shared->set_function_data(fun->shared()->function_data());
  int parameters = fun->shared()->internal_formal_parameter_count();
  shared->set_internal_formal_parameter_count(parameters);

  return shared;
}
1838

1839 1840 1841
MaybeHandle<Code> Compiler::GetOptimizedCodeForOSR(Handle<JSFunction> function,
                                                   BailoutId osr_ast_id,
                                                   JavaScriptFrame* osr_frame) {
1842
  DCHECK(!osr_ast_id.IsNone());
1843 1844
  DCHECK_NOT_NULL(osr_frame);
  return GetOptimizedCode(function, NOT_CONCURRENT, osr_ast_id, osr_frame);
1845 1846
}

1847 1848 1849 1850 1851 1852 1853 1854 1855 1856 1857
CompilationJob* Compiler::PrepareUnoptimizedCompilationJob(
    CompilationInfo* info) {
  VMState<COMPILER> state(info->isolate());
  std::unique_ptr<CompilationJob> job(GetUnoptimizedCompilationJob(info));
  if (job->PrepareJob() != CompilationJob::SUCCEEDED) {
    return nullptr;
  }
  return job.release();
}

// Finalizes a compilation job (optimized or unoptimized), taking ownership
// of |raw_job|. Returns true on success.
bool Compiler::FinalizeCompilationJob(CompilationJob* raw_job) {
  // Take ownership of compilation job.  Deleting job also tears down the zone.
  std::unique_ptr<CompilationJob> job(raw_job);

  VMState<COMPILER> state(job->info()->isolate());
  if (job->info()->IsOptimizing()) {
    return FinalizeOptimizedCompilationJob(job.get()) ==
           CompilationJob::SUCCEEDED;
  } else {
    return FinalizeUnoptimizedCompilationJob(job.get()) ==
           CompilationJob::SUCCEEDED;
  }
}

1871 1872 1873 1874 1875 1876 1877 1878 1879 1880 1881 1882 1883 1884 1885 1886 1887 1888
void Compiler::PostInstantiation(Handle<JSFunction> function,
                                 PretenureFlag pretenure) {
  Handle<SharedFunctionInfo> shared(function->shared());

  if (FLAG_always_opt && shared->allows_lazy_compilation()) {
    function->MarkForOptimization();
  }

  CodeAndLiterals cached = shared->SearchOptimizedCodeMap(
      function->context()->native_context(), BailoutId::None());
  if (cached.code != nullptr) {
    // Caching of optimized code enabled and optimized code found.
    DCHECK(!cached.code->marked_for_deoptimization());
    DCHECK(function->shared()->is_compiled());
    function->ReplaceCode(cached.code);
  }

  if (cached.literals != nullptr) {
1889
    DCHECK(shared->is_compiled());
1890
    function->set_literals(cached.literals);
1891 1892 1893
  } else if (shared->is_compiled()) {
    // TODO(mvstanton): pass pretenure flag to EnsureLiterals.
    JSFunction::EnsureLiterals(function);
1894 1895
  }
}
}  // namespace internal
}  // namespace v8