isolate.cc 174 KB
Newer Older
1
// Copyright 2012 the V8 project authors. All rights reserved.
2 3
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
4

5
#include "src/execution/isolate.h"
6

7 8
#include <stdlib.h>

9
#include <atomic>
10
#include <fstream>  // NOLINT(readability/streams)
11
#include <memory>
12
#include <sstream>
13
#include <unordered_map>
14

15
#include "src/api/api-inl.h"
16
#include "src/ast/ast-value-factory.h"
17
#include "src/ast/scopes.h"
18
#include "src/base/adapters.h"
19
#include "src/base/hashmap.h"
20
#include "src/base/platform/platform.h"
21
#include "src/base/sys-info.h"
22
#include "src/base/utils/random-number-generator.h"
23
#include "src/builtins/builtins-promise.h"
24
#include "src/builtins/constants-table-builder.h"
25 26
#include "src/codegen/assembler-inl.h"
#include "src/codegen/compilation-cache.h"
27
#include "src/common/ptr-compr.h"
28
#include "src/compiler-dispatcher/compiler-dispatcher.h"
29
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
Yang Guo's avatar
Yang Guo committed
30
#include "src/date/date.h"
31
#include "src/debug/debug-frames.h"
32
#include "src/debug/debug.h"
33
#include "src/deoptimizer/deoptimizer.h"
34
#include "src/diagnostics/compilation-statistics.h"
35 36 37 38 39 40
#include "src/execution/frames-inl.h"
#include "src/execution/isolate-inl.h"
#include "src/execution/messages.h"
#include "src/execution/microtask-queue.h"
#include "src/execution/runtime-profiler.h"
#include "src/execution/simulator.h"
41
#include "src/execution/v8threads.h"
42
#include "src/execution/vm-state-inl.h"
43
#include "src/heap/heap-inl.h"
44
#include "src/heap/read-only-heap.h"
45
#include "src/ic/stub-cache.h"
46 47
#include "src/init/bootstrapper.h"
#include "src/init/setup-isolate.h"
48
#include "src/init/v8.h"
49
#include "src/interpreter/interpreter.h"
50
#include "src/libsampler/sampler.h"
51
#include "src/logging/counters.h"
52
#include "src/logging/log.h"
53
#include "src/numbers/hash-seed-inl.h"
54
#include "src/objects/elements.h"
55
#include "src/objects/frame-array-inl.h"
56
#include "src/objects/hash-table-inl.h"
57
#include "src/objects/js-array-inl.h"
58
#include "src/objects/js-generator-inl.h"
59
#include "src/objects/js-weak-refs-inl.h"
60
#include "src/objects/module-inl.h"
61
#include "src/objects/promise-inl.h"
62
#include "src/objects/prototype.h"
63
#include "src/objects/slots.h"
64
#include "src/objects/smi.h"
65
#include "src/objects/stack-frame-info-inl.h"
66
#include "src/objects/visitors.h"
67
#include "src/profiler/heap-profiler.h"
68
#include "src/profiler/tracing-cpu-profiler.h"
69
#include "src/regexp/regexp-stack.h"
70 71
#include "src/snapshot/embedded/embedded-data.h"
#include "src/snapshot/embedded/embedded-file-writer.h"
72
#include "src/snapshot/read-only-deserializer.h"
73
#include "src/snapshot/startup-deserializer.h"
74 75
#include "src/strings/string-builder-inl.h"
#include "src/strings/string-stream.h"
76
#include "src/tasks/cancelable-task.h"
77
#include "src/tracing/tracing-category-observer.h"
78
#include "src/trap-handler/trap-handler.h"
79 80
#include "src/utils/ostreams.h"
#include "src/utils/version.h"
81
#include "src/wasm/wasm-code-manager.h"
82
#include "src/wasm/wasm-engine.h"
83
#include "src/wasm/wasm-objects.h"
84
#include "src/zone/accounting-allocator.h"
85 86 87
#ifdef V8_INTL_SUPPORT
#include "unicode/uobject.h"
#endif  // V8_INTL_SUPPORT
88

89
#if defined(V8_OS_WIN64)
90
#include "src/diagnostics/unwinding-info-win64.h"
91
#endif  // V8_OS_WIN64
92

93 94 95
extern "C" const uint8_t* v8_Default_embedded_blob_;
extern "C" uint32_t v8_Default_embedded_blob_size_;

96 97 98
namespace v8 {
namespace internal {

99 100 101 102 103 104 105 106 107 108 109 110
#ifdef DEBUG
// Logs an isolate lifecycle event (tagged with the isolate pointer and id)
// when --trace-isolates is set; compiles to nothing in release builds.
#define TRACE_ISOLATE(tag)                                                  \
  do {                                                                      \
    if (FLAG_trace_isolates) {                                              \
      PrintF("Isolate %p (id %d)" #tag "\n", reinterpret_cast<void*>(this), \
             id());                                                         \
    }                                                                       \
  } while (false)
#else
#define TRACE_ISOLATE(tag)
#endif

111 112
// Accessors for the default embedded blob (the extern symbols declared above).
const uint8_t* DefaultEmbeddedBlob() { return v8_Default_embedded_blob_; }
uint32_t DefaultEmbeddedBlobSize() { return v8_Default_embedded_blob_size_; }
113

114
#ifdef V8_MULTI_SNAPSHOTS
115 116 117 118 119
extern "C" const uint8_t* v8_Trusted_embedded_blob_;
extern "C" uint32_t v8_Trusted_embedded_blob_size_;

const uint8_t* TrustedEmbeddedBlob() { return v8_Trusted_embedded_blob_; }
uint32_t TrustedEmbeddedBlobSize() { return v8_Trusted_embedded_blob_size_; }
120 121
#endif

122 123 124 125 126 127 128 129 130 131 132
namespace {
// These variables provide access to the current embedded blob without requiring
// an isolate instance. This is needed e.g. by Code::InstructionStart, which may
// not have access to an isolate but still needs to access the embedded blob.
// The variables are initialized by each isolate in Init(). Writes and reads are
// relaxed since we can guarantee that the current thread has initialized these
// variables before accessing them. Different threads may race, but this is fine
// since they all attempt to set the same values of the blob pointer and size.

std::atomic<const uint8_t*> current_embedded_blob_(nullptr);
std::atomic<uint32_t> current_embedded_blob_size_(0);

// The various workflows around embedded snapshots are fairly complex. We need
// to support plain old snapshot builds, nosnap builds, and the requirements of
// subtly different serialization tests. There's two related knobs to twiddle:
//
// - The default embedded blob may be overridden by setting the sticky embedded
// blob. This is set automatically whenever we create a new embedded blob.
//
// - Lifecycle management can be either manual or set to refcounting.
//
// A few situations to demonstrate their use:
//
// - A plain old snapshot build neither overrides the default blob nor
// refcounts.
//
// - mksnapshot sets the sticky blob and manually frees the embedded
// blob once done.
//
// - Most serializer tests do the same.
//
// - Nosnapshot builds set the sticky blob and enable refcounting.

// This mutex protects access to the following variables:
// - sticky_embedded_blob_
// - sticky_embedded_blob_size_
// - enable_embedded_blob_refcounting_
// - current_embedded_blob_refs_
base::LazyMutex current_embedded_blob_refcount_mutex_ = LAZY_MUTEX_INITIALIZER;

const uint8_t* sticky_embedded_blob_ = nullptr;
uint32_t sticky_embedded_blob_size_ = 0;

bool enable_embedded_blob_refcounting_ = true;
int current_embedded_blob_refs_ = 0;

// Accessors and mutator for the sticky blob. Callers must hold
// current_embedded_blob_refcount_mutex_ (see the comment above).
const uint8_t* StickyEmbeddedBlob() { return sticky_embedded_blob_; }
uint32_t StickyEmbeddedBlobSize() { return sticky_embedded_blob_size_; }

void SetStickyEmbeddedBlob(const uint8_t* blob, uint32_t blob_size) {
  sticky_embedded_blob_ = blob;
  sticky_embedded_blob_size_ = blob_size;
}

}  // namespace

178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200
// Permanently switches embedded-blob lifecycle management from refcounting to
// manual management (a precondition of FreeCurrentEmbeddedBlob below).
void DisableEmbeddedBlobRefcounting() {
  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
  enable_embedded_blob_refcounting_ = false;
}

// Frees the off-heap instruction stream backing the current embedded blob,
// provided a sticky blob was installed. Requires refcounting to have been
// disabled first (manual lifecycle management).
void FreeCurrentEmbeddedBlob() {
  CHECK(!enable_embedded_blob_refcounting_);
  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());

  // Nothing to free if no sticky blob was ever installed.
  if (StickyEmbeddedBlob() == nullptr) return;

  // The sticky blob must still be the process-wide current blob.
  CHECK_EQ(StickyEmbeddedBlob(), Isolate::CurrentEmbeddedBlob());

  InstructionStream::FreeOffHeapInstructionStream(
      const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlob()),
      Isolate::CurrentEmbeddedBlobSize());

  // Clear all global blob state.
  current_embedded_blob_.store(nullptr, std::memory_order_relaxed);
  current_embedded_blob_size_.store(0, std::memory_order_relaxed);
  sticky_embedded_blob_ = nullptr;
  sticky_embedded_blob_size_ = 0;
}

201 202 203 204 205 206 207 208 209 210 211 212 213 214 215 216
// static
// Returns true if the current embedded blob is one of the blobs compiled into
// the binary (default or, in multi-snapshot builds, trusted), and therefore
// immortal immovable.
bool Isolate::CurrentEmbeddedBlobIsBinaryEmbedded() {
  // In some situations, we must be able to rely on the embedded blob being
  // immortal immovable. This is the case if the blob is binary-embedded.
  // See blob lifecycle controls above for descriptions of when the current
  // embedded blob may change (e.g. in tests or mksnapshot). If the blob is
  // binary-embedded, it is immortal immovable.
  // Consistency: use the standard spelling std::memory_order_relaxed, as the
  // other atomic accesses in this file already do.
  const uint8_t* blob = current_embedded_blob_.load(std::memory_order_relaxed);
  if (blob == nullptr) return false;
#ifdef V8_MULTI_SNAPSHOTS
  if (blob == TrustedEmbeddedBlob()) return true;
#endif
  return blob == DefaultEmbeddedBlob();
}

217
// Installs {blob} as both this isolate's embedded blob and the process-wide
// current blob, then (in debug builds) checks the blob's checksum.
void Isolate::SetEmbeddedBlob(const uint8_t* blob, uint32_t blob_size) {
  CHECK_NOT_NULL(blob);

  embedded_blob_ = blob;
  embedded_blob_size_ = blob_size;
  current_embedded_blob_.store(blob, std::memory_order_relaxed);
  current_embedded_blob_size_.store(blob_size, std::memory_order_relaxed);

#ifdef DEBUG
  // Verify that the contents of the embedded blob are unchanged from
  // serialization-time, just to ensure the compiler isn't messing with us.
  EmbeddedData d = EmbeddedData::FromBlob();
  if (d.EmbeddedBlobHash() != d.CreateEmbeddedBlobHash()) {
    FATAL(
        "Embedded blob checksum verification failed. This indicates that the "
        "embedded blob has been modified since compilation time. A common "
        "cause is a debugging breakpoint set within builtin code.");
  }
#endif  // DEBUG
}

238 239 240 241 242 243 244 245 246 247 248 249 250
// Detaches this isolate's embedded blob and clears the process-wide current
// and sticky blob state. Only legal while refcounting is enabled and while
// this isolate's blob is both the current and the sticky blob.
void Isolate::ClearEmbeddedBlob() {
  CHECK(enable_embedded_blob_refcounting_);
  CHECK_EQ(embedded_blob_, CurrentEmbeddedBlob());
  CHECK_EQ(embedded_blob_, StickyEmbeddedBlob());

  embedded_blob_ = nullptr;
  embedded_blob_size_ = 0;
  current_embedded_blob_.store(nullptr, std::memory_order_relaxed);
  current_embedded_blob_size_.store(0, std::memory_order_relaxed);
  sticky_embedded_blob_ = nullptr;
  sticky_embedded_blob_size_ = 0;
}

251 252
// Accessors for this isolate's embedded blob pointer and size.
const uint8_t* Isolate::embedded_blob() const { return embedded_blob_; }
uint32_t Isolate::embedded_blob_size() const { return embedded_blob_size_; }
253 254 255 256 257 258 259 260 261 262 263

// static
// Returns the process-wide current embedded blob; null before any isolate has
// installed one.
const uint8_t* Isolate::CurrentEmbeddedBlob() {
  // Consistency: use the standard spelling std::memory_order_relaxed, as the
  // corresponding stores in SetEmbeddedBlob/ClearEmbeddedBlob already do.
  return current_embedded_blob_.load(std::memory_order_relaxed);
}

// static
// Returns the size of the process-wide current embedded blob (0 if none).
uint32_t Isolate::CurrentEmbeddedBlobSize() {
  return current_embedded_blob_size_.load(std::memory_order_relaxed);
}
264

265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291
// Hashes the isolate-independent parts of all builtin Code objects (their
// data headers from the safepoint-table offset onward) plus the builtins
// constants table length. Used to tie an embedded blob to a compatible
// isolate layout.
size_t Isolate::HashIsolateForEmbeddedBlob() {
  DCHECK(builtins_.is_initialized());
  DCHECK(FLAG_embedded_builtins);
  DCHECK(Builtins::AllBuiltinsAreIsolateIndependent());

  DisallowHeapAllocation no_gc;

  static constexpr size_t kSeed = 0;
  size_t hash = kSeed;

  // Hash data sections of builtin code objects.
  for (int i = 0; i < Builtins::builtin_count; i++) {
    Code code = heap_.builtin(i);

    DCHECK(Internals::HasHeapObjectTag(code.ptr()));
    // Strip the heap-object tag to get the object's raw start address.
    uint8_t* const code_ptr =
        reinterpret_cast<uint8_t*>(code.ptr() - kHeapObjectTag);

    // These static asserts ensure we don't miss relevant fields. We don't hash
    // instruction size and flags since they change when creating the off-heap
    // trampolines. Other data fields must remain the same.
    STATIC_ASSERT(Code::kInstructionSizeOffset == Code::kDataStart);
    STATIC_ASSERT(Code::kFlagsOffset == Code::kInstructionSizeOffsetEnd + 1);
    STATIC_ASSERT(Code::kSafepointTableOffsetOffset ==
                  Code::kFlagsOffsetEnd + 1);
    static constexpr int kStartOffset = Code::kSafepointTableOffsetOffset;

    // Hash the remaining header bytes one byte at a time.
    for (int j = kStartOffset; j < Code::kUnalignedHeaderSize; j++) {
      hash = base::hash_combine(hash, size_t{code_ptr[j]});
    }
  }

  // The builtins constants table is also tightly tied to embedded builtins.
  hash = base::hash_combine(
      hash, static_cast<size_t>(heap_.builtins_constants_table().length()));

  return hash;
}

304 305
base::Thread::LocalStorageKey Isolate::isolate_key_;
base::Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
306
#if DEBUG
307
std::atomic<bool> Isolate::isolate_key_created_{false};
308
#endif
309

310 311 312 313 314
namespace {
// A global counter for all generated Isolates, might overflow.
std::atomic<int> isolate_counter{0};
}  // namespace

315
// Returns the calling thread's PerIsolateThreadData for this isolate,
// creating and registering a new one under the table lock if none exists yet.
Isolate::PerIsolateThreadData*
Isolate::FindOrAllocatePerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  PerIsolateThreadData* per_thread = nullptr;
  {
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread == nullptr) {
      base::OS::AdjustSchedulingParams();
      per_thread = new PerIsolateThreadData(this, thread_id);
      thread_data_table_.Insert(per_thread);
    }
    DCHECK(thread_data_table_.Lookup(thread_id) == per_thread);
  }
  return per_thread;
}

332
// Removes and unregisters the calling thread's PerIsolateThreadData, if any.
// A thread without a valid id never allocated per-thread data, so there is
// nothing to discard in that case.
void Isolate::DiscardPerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::TryGetCurrent();
  if (thread_id.IsValid()) {
    // The calling thread must not currently own the ThreadManager mutex.
    DCHECK_NE(thread_manager_->mutex_owner_.load(std::memory_order_relaxed),
              thread_id);
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    PerIsolateThreadData* per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread) {
      // Only threads whose state was already released may be discarded.
      DCHECK(!per_thread->thread_state_);
      thread_data_table_.Remove(per_thread);
    }
  }
}

346 347
// Returns the calling thread's PerIsolateThreadData, or nullptr if this
// thread has never allocated one for this isolate.
Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
  // Delegate to the generic lookup using the calling thread's id.
  return FindPerThreadDataForThread(ThreadId::Current());
}

// Returns the PerIsolateThreadData registered for {thread_id}, or nullptr if
// no such entry exists.
Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
    ThreadId thread_id) {
  // The table lock only needs to be held for the duration of the lookup.
  base::MutexGuard lock_guard(&thread_data_table_mutex_);
  return thread_data_table_.Lookup(thread_id);
}

361 362
// Creates the process-wide thread-local keys used to locate the current
// isolate and its per-thread data. Must run exactly once per process.
void Isolate::InitializeOncePerProcess() {
  isolate_key_ = base::Thread::CreateThreadLocalKey();
#if DEBUG
  // Assert that the key is created exactly once (the flag flips false->true).
  bool expected = false;
  DCHECK_EQ(true, isolate_key_created_.compare_exchange_strong(
                      expected, true, std::memory_order_relaxed));
#endif
  per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey();
}

371
// Returns the address recorded for the given isolate-address id.
Address Isolate::get_address_from_id(IsolateAddressId id) {
  return isolate_addresses_[id];
}

375
// Visits the roots of the ThreadLocalTop serialized in {thread_storage} and
// returns the address just past the consumed storage.
char* Isolate::Iterate(RootVisitor* v, char* thread_storage) {
  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
  Iterate(v, thread);
  return thread_storage + sizeof(ThreadLocalTop);
}

// Presents the ThreadLocalTop stored in the archived thread data {t} to the
// given ThreadVisitor.
void Isolate::IterateThread(ThreadVisitor* v, char* t) {
  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
  v->VisitThread(this, thread);
}

386
// Visits all GC roots held by {thread}: the pending/scheduled exception
// slots, the current context, every active v8::TryCatch block, and the
// pointers on the thread's native execution stack.
void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) {
  // Visit the roots from the top for a given thread.
  v->VisitRootPointer(Root::kTop, nullptr,
                      FullObjectSlot(&thread->pending_exception_));
  v->VisitRootPointer(Root::kTop, nullptr,
                      FullObjectSlot(&thread->pending_message_obj_));
  v->VisitRootPointer(Root::kTop, nullptr, FullObjectSlot(&thread->context_));
  v->VisitRootPointer(Root::kTop, nullptr,
                      FullObjectSlot(&thread->scheduled_exception_));

  // Visit the exception and message slots of each TryCatch handler in the
  // thread's handler chain.
  for (v8::TryCatch* block = thread->try_catch_handler_; block != nullptr;
       block = block->next_) {
    // TODO(3770): Make TryCatch::exception_ an Address (and message_obj_ too).
    v->VisitRootPointer(
        Root::kTop, nullptr,
        FullObjectSlot(reinterpret_cast<Address>(&(block->exception_))));
    v->VisitRootPointer(
        Root::kTop, nullptr,
        FullObjectSlot(reinterpret_cast<Address>(&(block->message_obj_))));
  }

  // Iterate over pointers on native execution stack.
  wasm::WasmCodeRefScope wasm_code_ref_scope;
  for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
    it.frame()->Iterate(v);
  }
}

414
// Visits the roots held by the currently executing thread.
void Isolate::Iterate(RootVisitor* v) {
  Iterate(v, thread_local_top());
}

419
// Visits every DeferredHandles instance in this isolate's singly-linked list.
void Isolate::IterateDeferredHandles(RootVisitor* visitor) {
  DeferredHandles* current = deferred_handles_head_;
  while (current != nullptr) {
    current->Iterate(visitor);
    current = current->next_;
  }
}

426
#ifdef DEBUG
427 428 429 430
bool Isolate::IsDeferredHandle(Address* handle) {
  // Comparing unrelated pointers (not from the same array) is undefined
  // behavior, so cast to Address before making arbitrary comparisons.
  Address handle_as_address = reinterpret_cast<Address>(handle);
431
  // Each DeferredHandles instance keeps the handles to one job in the
432
  // concurrent recompilation queue, containing a list of blocks.  Each block
433 434 435 436
  // contains kHandleBlockSize handles except for the first block, which may
  // not be fully filled.
  // We iterate through all the blocks to see whether the argument handle
  // belongs to one of the blocks.  If so, it is deferred.
437
  for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
438
       deferred = deferred->next_) {
439
    std::vector<Address*>* blocks = &deferred->blocks_;
440
    for (size_t i = 0; i < blocks->size(); i++) {
441
      Address* block_limit = (i == 0) ? deferred->first_block_limit_
442
                                      : blocks->at(i) + kHandleBlockSize;
443 444 445 446
      if (reinterpret_cast<Address>(blocks->at(i)) <= handle_as_address &&
          handle_as_address < reinterpret_cast<Address>(block_limit)) {
        return true;
      }
447 448 449 450 451 452
    }
  }
  return false;
}
#endif  // DEBUG

453
// Installs {that} as the innermost TryCatch handler for the current thread.
void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
  thread_local_top()->try_catch_handler_ = that;
}

// Pops {that} off the handler chain; it must be the innermost handler.
void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
  DCHECK(thread_local_top()->try_catch_handler_ == that);
  thread_local_top()->try_catch_handler_ = that->next_;
}

// Returns the current stack trace as a string. stack_trace_nesting_level_
// guards against reentrancy: a fault while printing (level 1) dumps the
// partial message to stdout instead, and any deeper recursion aborts.
Handle<String> Isolate::StackTraceString() {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    HeapStringAllocator allocator;
    StringStream::ClearMentionedObjectCache(this);
    StringStream accumulator(&allocator);
    // Stash the accumulator so the double-fault path below can flush it.
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator);
    Handle<String> stack_trace = accumulator.ToString(this);
    incomplete_message_ = nullptr;
    stack_trace_nesting_level_ = 0;
    return stack_trace;
  } else if (stack_trace_nesting_level_ == 1) {
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToStdOut();
    return factory()->empty_string();
  } else {
    base::OS::Abort();
    // Unreachable
    return factory()->empty_string();
  }
}

489 490 491 492 493
// Prints a stack trace together with the four caller-supplied pointers (via a
// stack-allocated failure message, so the data lands in crash dumps), then
// aborts the process.
void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3,
                                   void* ptr4) {
  StackTraceFailureMessage message(this, ptr1, ptr2, ptr3, ptr4);
  message.Print();
  base::OS::Abort();
}
495

496 497 498
// Writes the captured pointers and the buffered JS stack trace to stderr.
void StackTraceFailureMessage::Print() volatile {
  // Print the details of this failure message object, including its own address
  // to force stack allocation.
  base::OS::PrintError(
      "Stacktrace:\n   ptr1=%p\n    ptr2=%p\n    ptr3=%p\n    ptr4=%p\n    "
      "failure_message_object=%p\n%s",
      ptr1_, ptr2_, ptr3_, ptr4_, this, &js_stack_trace_[0]);
}

505 506 507 508 509 510 511 512 513 514 515 516 517 518 519 520 521 522 523 524
// Captures the four debug pointers, a concise JS stack trace, and the code
// objects of the topmost frames at construction time.
StackTraceFailureMessage::StackTraceFailureMessage(Isolate* isolate, void* ptr1,
                                                   void* ptr2, void* ptr3,
                                                   void* ptr4) {
  isolate_ = isolate;
  ptr1_ = ptr1;
  ptr2_ = ptr2;
  ptr3_ = ptr3;
  ptr4_ = ptr4;
  // Write a stack trace into the {js_stack_trace_} buffer.
  const size_t buffer_length = arraysize(js_stack_trace_);
  memset(&js_stack_trace_, 0, buffer_length);
  // buffer_length - 1 keeps the final zero byte written by memset in place,
  // so the buffer stays NUL-terminated.
  FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1);
  StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
  isolate->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
  // Keeping a reference to the last code objects to increase likelihood that
  // they get included in the minidump.
  const size_t code_objects_length = arraysize(code_objects_);
  size_t i = 0;
  StackFrameIterator it(isolate);
  for (; !it.done() && i < code_objects_length; it.Advance()) {
    code_objects_[i++] =
        reinterpret_cast<void*>(it.frame()->unchecked_code().ptr());
  }
}

530 531 532 533 534 535 536 537 538 539 540 541 542 543 544 545 546 547 548 549 550 551 552 553 554 555 556 557 558 559 560 561 562 563 564 565 566 567 568 569
namespace {

// Helpers for the per-Code stack-frame cache: looks up and stores
// StackTraceFrame objects keyed by code offset in a SimpleNumberDictionary
// attached to the code object. Caching is skipped under --optimize-for-size.
class StackFrameCacheHelper : public AllStatic {
 public:
  // Returns the cached frame for (code, code_offset), or an empty handle if
  // no cache exists or the offset is not present.
  static MaybeHandle<StackTraceFrame> LookupCachedFrame(
      Isolate* isolate, Handle<AbstractCode> code, int code_offset) {
    if (FLAG_optimize_for_size) return MaybeHandle<StackTraceFrame>();

    const auto maybe_cache = handle(code->stack_frame_cache(), isolate);
    if (!maybe_cache->IsSimpleNumberDictionary())
      return MaybeHandle<StackTraceFrame>();

    const auto cache = Handle<SimpleNumberDictionary>::cast(maybe_cache);
    const int entry = cache->FindEntry(isolate, code_offset);
    if (entry != NumberDictionary::kNotFound) {
      return handle(StackTraceFrame::cast(cache->ValueAt(entry)), isolate);
    }
    return MaybeHandle<StackTraceFrame>();
  }

  // Inserts {frame} into the code object's cache (creating the dictionary if
  // needed) and re-attaches the dictionary if insertion reallocated it.
  static void CacheFrameAndUpdateCache(Isolate* isolate,
                                       Handle<AbstractCode> code,
                                       int code_offset,
                                       Handle<StackTraceFrame> frame) {
    if (FLAG_optimize_for_size) return;

    const auto maybe_cache = handle(code->stack_frame_cache(), isolate);
    const auto cache = maybe_cache->IsSimpleNumberDictionary()
                           ? Handle<SimpleNumberDictionary>::cast(maybe_cache)
                           : SimpleNumberDictionary::New(isolate, 1);
    Handle<SimpleNumberDictionary> new_cache =
        SimpleNumberDictionary::Set(isolate, cache, code_offset, frame);
    if (*new_cache != *cache || !maybe_cache->IsSimpleNumberDictionary()) {
      AbstractCode::SetStackFrameCache(code, new_cache);
    }
  }
};

}  // anonymous namespace

570
// Accumulates stack frames into a FrameArray while a stack walk is in
// progress, applying the requested frame-skipping mode, visibility filtering
// (builtins, API functions), and optional security-context checks.
class FrameArrayBuilder {
 public:
  enum FrameFilterMode { ALL, CURRENT_SECURITY_CONTEXT };

  FrameArrayBuilder(Isolate* isolate, FrameSkipMode mode, int limit,
                    Handle<Object> caller, FrameFilterMode filter_mode)
      : isolate_(isolate),
        mode_(mode),
        limit_(limit),
        caller_(caller),
        check_security_context_(filter_mode == CURRENT_SECURITY_CONTEXT) {
    switch (mode_) {
      case SKIP_FIRST:
        skip_next_frame_ = true;
        break;
      case SKIP_UNTIL_SEEN:
        DCHECK(caller_->IsJSFunction());
        skip_next_frame_ = true;
        break;
      case SKIP_NONE:
        skip_next_frame_ = false;
        break;
    }

    // Start small; the array grows as frames are appended.
    elements_ = isolate->factory()->NewFrameArray(Min(limit, 10));
  }

  // Appends the frame of a suspended generator (async function) if visible.
  void AppendAsyncFrame(Handle<JSGeneratorObject> generator_object) {
    if (full()) return;
    Handle<JSFunction> function(generator_object->function(), isolate_);
    if (!IsVisibleInStackTrace(function)) return;
    int flags = FrameArray::kIsAsync;
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;

    Handle<Object> receiver(generator_object->receiver(), isolate_);
    Handle<AbstractCode> code(
        AbstractCode::cast(function->shared().GetBytecodeArray()), isolate_);
    int offset = Smi::ToInt(generator_object->input_or_debug_pos());
    // The stored bytecode offset is relative to a different base than what
    // is used in the source position table, hence the subtraction.
    offset -= BytecodeArray::kHeaderSize - kHeapObjectTag;

    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
    if (V8_UNLIKELY(FLAG_detailed_error_stack_trace)) {
      int param_count = function->shared().internal_formal_parameter_count();
      parameters = isolate_->factory()->NewFixedArray(param_count);
      for (int i = 0; i < param_count; i++) {
        parameters->set(i, generator_object->parameters_and_registers().get(i));
      }
    }

    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function, code,
                                          offset, flags, parameters);
  }

  // Appends a synthetic frame representing a Promise.all combinator.
  void AppendPromiseAllFrame(Handle<Context> context, int offset) {
    if (full()) return;
    int flags = FrameArray::kIsAsync | FrameArray::kIsPromiseAll;

    Handle<Context> native_context(context->native_context(), isolate_);
    Handle<JSFunction> function(native_context->promise_all(), isolate_);
    if (!IsVisibleInStackTrace(function)) return;

    Handle<Object> receiver(native_context->promise_function(), isolate_);
    Handle<AbstractCode> code(AbstractCode::cast(function->code()), isolate_);

    // TODO(mmarchini) save Promises list from Promise.all()
    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();

    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function, code,
                                          offset, flags, parameters);
  }

  // Appends an ordinary JavaScript frame from a frame summary.
  void AppendJavaScriptFrame(
      FrameSummary::JavaScriptFrameSummary const& summary) {
    // Filter out internal frames that we do not want to show.
    if (!IsVisibleInStackTrace(summary.function())) return;

    Handle<AbstractCode> abstract_code = summary.abstract_code();
    const int offset = summary.code_offset();

    const bool is_constructor = summary.is_constructor();

    int flags = 0;
    Handle<JSFunction> function = summary.function();
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
    if (is_constructor) flags |= FrameArray::kIsConstructor;

    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
    if (V8_UNLIKELY(FLAG_detailed_error_stack_trace))
      parameters = summary.parameters();

    elements_ = FrameArray::AppendJSFrame(
        elements_, TheHoleToUndefined(isolate_, summary.receiver()), function,
        abstract_code, offset, flags, parameters);
  }

  // Appends a compiled wasm frame; asm.js-origin frames get distinct flags.
  void AppendWasmCompiledFrame(
      FrameSummary::WasmCompiledFrameSummary const& summary) {
    if (summary.code()->kind() != wasm::WasmCode::kFunction) return;
    Handle<WasmInstanceObject> instance = summary.wasm_instance();
    int flags = 0;
    if (instance->module_object().is_asm_js()) {
      flags |= FrameArray::kIsAsmJsWasmFrame;
      if (summary.at_to_number_conversion()) {
        flags |= FrameArray::kAsmJsAtNumberConversion;
      }
    } else {
      flags |= FrameArray::kIsWasmFrame;
    }

    elements_ = FrameArray::AppendWasmFrame(
        elements_, instance, summary.function_index(), summary.code(),
        summary.code_offset(), flags);
  }

  // Appends a frame executing in the wasm interpreter (no compiled code).
  void AppendWasmInterpretedFrame(
      FrameSummary::WasmInterpretedFrameSummary const& summary) {
    Handle<WasmInstanceObject> instance = summary.wasm_instance();
    int flags = FrameArray::kIsWasmInterpretedFrame;
    DCHECK(!instance->module_object().is_asm_js());
    elements_ = FrameArray::AppendWasmFrame(elements_, instance,
                                            summary.function_index(), {},
                                            summary.byte_offset(), flags);
  }

  // Appends the frame of a builtin called via a BuiltinExitFrame.
  void AppendBuiltinExitFrame(BuiltinExitFrame* exit_frame) {
    Handle<JSFunction> function = handle(exit_frame->function(), isolate_);

    // Filter out internal frames that we do not want to show.
    if (!IsVisibleInStackTrace(function)) return;

    // TODO(szuend): Remove this check once the flag is enabled
    //               by default.
    if (!FLAG_experimental_stack_trace_frames &&
        function->shared().IsApiFunction()) {
      return;
    }

    Handle<Object> receiver(exit_frame->receiver(), isolate_);
    Handle<Code> code(exit_frame->LookupCode(), isolate_);
    const int offset =
        static_cast<int>(exit_frame->pc() - code->InstructionStart());

    int flags = 0;
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
    if (exit_frame->IsConstructor()) flags |= FrameArray::kIsConstructor;

    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
    if (V8_UNLIKELY(FLAG_detailed_error_stack_trace)) {
      int param_count = exit_frame->ComputeParametersCount();
      parameters = isolate_->factory()->NewFixedArray(param_count);
      for (int i = 0; i < param_count; i++) {
        parameters->set(i, exit_frame->GetParameter(i));
      }
    }

    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function,
                                          Handle<AbstractCode>::cast(code),
                                          offset, flags, parameters);
  }

  // True once the configured frame limit has been reached.
  bool full() { return elements_->FrameCount() >= limit_; }

  Handle<FrameArray> GetElements() {
    elements_->ShrinkToFit(isolate_);
    return elements_;
  }

  // Creates a StackTraceFrame object for each frame in the FrameArray.
  Handle<FixedArray> GetElementsAsStackTraceFrameArray(
      bool enable_frame_caching) {
    elements_->ShrinkToFit(isolate_);
    const int frame_count = elements_->FrameCount();
    Handle<FixedArray> stack_trace =
        isolate_->factory()->NewFixedArray(frame_count);

    for (int i = 0; i < frame_count; ++i) {
      // Caching stack frames only happens for user JS frames.
      const bool cache_frame =
          enable_frame_caching && !elements_->IsAnyWasmFrame(i) &&
          elements_->Function(i).shared().IsUserJavaScript();
      if (cache_frame) {
        MaybeHandle<StackTraceFrame> maybe_frame =
            StackFrameCacheHelper::LookupCachedFrame(
                isolate_, handle(elements_->Code(i), isolate_),
                Smi::ToInt(elements_->Offset(i)));
        if (!maybe_frame.is_null()) {
          Handle<StackTraceFrame> frame = maybe_frame.ToHandleChecked();
          stack_trace->set(i, *frame);
          continue;
        }
      }

      Handle<StackTraceFrame> frame =
          isolate_->factory()->NewStackTraceFrame(elements_, i);
      stack_trace->set(i, *frame);

      if (cache_frame) {
        StackFrameCacheHelper::CacheFrameAndUpdateCache(
            isolate_, handle(elements_->Code(i), isolate_),
            Smi::ToInt(elements_->Offset(i)), frame);
      }
    }
    return stack_trace;
  }

 private:
  // Poison stack frames below the first strict mode frame.
  // The stack trace API should not expose receivers and function
  // objects on frames deeper than the top-most one with a strict mode
  // function.
  bool IsStrictFrame(Handle<JSFunction> function) {
    if (!encountered_strict_function_) {
      encountered_strict_function_ =
          is_strict(function->shared().language_mode());
    }
    return encountered_strict_function_;
  }

  // Determines whether the given stack frame should be displayed in a stack
  // trace.
  bool IsVisibleInStackTrace(Handle<JSFunction> function) {
    return ShouldIncludeFrame(function) && IsNotHidden(function) &&
           IsInSameSecurityContext(function);
  }

  // This mechanism excludes a number of uninteresting frames from the stack
  // trace. This can be be the first frame (which will be a builtin-exit frame
  // for the error constructor builtin) or every frame until encountering a
  // user-specified function.
  bool ShouldIncludeFrame(Handle<JSFunction> function) {
    switch (mode_) {
      case SKIP_NONE:
        return true;
      case SKIP_FIRST:
        if (!skip_next_frame_) return true;
        skip_next_frame_ = false;
        return false;
      case SKIP_UNTIL_SEEN:
        if (skip_next_frame_ && (*function == *caller_)) {
          skip_next_frame_ = false;
          return false;
        }
        return !skip_next_frame_;
    }
    UNREACHABLE();
  }

  bool IsNotHidden(Handle<JSFunction> function) {
    // Functions defined not in user scripts are not visible unless directly
    // exposed, in which case the native flag is set.
    // The --builtins-in-stack-traces command line flag allows including
    // internal call sites in the stack trace for debugging purposes.
    if (!FLAG_builtins_in_stack_traces &&
        !function->shared().IsUserJavaScript()) {
      return function->shared().native() || function->shared().IsApiFunction();
    }
    return true;
  }

  bool IsInSameSecurityContext(Handle<JSFunction> function) {
    if (!check_security_context_) return true;
    return isolate_->context().HasSameSecurityTokenAs(function->context());
  }

  // TODO(jgruber): Fix all cases in which frames give us a hole value (e.g. the
  // receiver in RegExp constructor frames.
  Handle<Object> TheHoleToUndefined(Isolate* isolate, Handle<Object> in) {
    return (in->IsTheHole(isolate))
               ? Handle<Object>::cast(isolate->factory()->undefined_value())
               : in;
  }

  Isolate* isolate_;
  const FrameSkipMode mode_;
  int limit_;
  const Handle<Object> caller_;
  bool skip_next_frame_ = true;
  bool encountered_strict_function_ = false;
  const bool check_security_context_;
  Handle<FrameArray> elements_;
};

jgruber's avatar
jgruber committed
854 855 856 857 858 859 860 861 862
// Reads Error.stackTraceLimit and stores it, clamped to be non-negative,
// in {result}. Returns false when the property is absent or not a number.
bool GetStackTraceLimit(Isolate* isolate, int* result) {
  Handle<JSObject> error_constructor = isolate->error_function();
  Handle<String> limit_key = isolate->factory()->stackTraceLimit_string();
  Handle<Object> limit_value =
      JSReceiver::GetDataProperty(error_constructor, limit_key);
  if (!limit_value->IsNumber()) return false;

  // Negative limits are clamped to zero.
  *result = Max(FastD2IChecked(limit_value->Number()), 0);

  // Count (for usage metrics) when the limit differs from the default.
  if (*result != FLAG_stack_trace_limit) {
    isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit);
  }

  return true;
}

871
// Extension callback that unconditionally declines to handle the call.
bool NoExtension(const v8::FunctionCallbackInfo<v8::Value>&) {
  return false;
}
872

873
bool IsBuiltinFunction(Isolate* isolate, HeapObject object,
874
                       Builtins::Name builtin_index) {
875
  if (!object.IsJSFunction()) return false;
876
  JSFunction const function = JSFunction::cast(object);
877
  return function.code() == isolate->builtins()->builtin(builtin_index);
878 879 880 881 882
}

// Walks the chain of native promises starting at {promise}, appending one
// async frame per step to {builder}, until the builder is full or the chain
// can no longer be followed unambiguously.
void CaptureAsyncStackTrace(Isolate* isolate, Handle<JSPromise> promise,
                            FrameArrayBuilder* builder) {
  while (!builder->full()) {
    // Stop once the {promise} has been settled.
    if (promise->status() != Promise::kPending) return;

    // Stop unless there is exactly one PromiseReaction on the {promise}.
    if (!promise->reactions().IsPromiseReaction()) return;
    Handle<PromiseReaction> reaction(
        PromiseReaction::cast(promise->reactions()), isolate);
    if (!reaction->next().IsSmi()) return;

    // Does the {reaction} use one of the known async function or async
    // generator continuations as its fulfill handler?
    if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncFunctionAwaitResolveClosure) ||
        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncGeneratorAwaitResolveClosure) ||
        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncGeneratorYieldResolveClosure)) {
      // Peek into the handler's AwaitContext to reach the
      // JSGeneratorObject for the async function.
      Handle<Context> await_context(
          JSFunction::cast(reaction->fulfill_handler()).context(), isolate);
      Handle<JSGeneratorObject> generator_object(
          JSGeneratorObject::cast(await_context->extension()), isolate);
      CHECK(generator_object->is_suspended());

      // Record an async frame for the {generator_object}.
      builder->AppendAsyncFrame(generator_object);

      // Determine the next promise to continue from.
      if (generator_object->IsJSAsyncFunctionObject()) {
        Handle<JSAsyncFunctionObject> async_function_object =
            Handle<JSAsyncFunctionObject>::cast(generator_object);
        promise = handle(async_function_object->promise(), isolate);
      } else {
        Handle<JSAsyncGeneratorObject> async_generator_object =
            Handle<JSAsyncGeneratorObject>::cast(generator_object);
        // An empty queue means there is nothing further to follow.
        if (async_generator_object->queue().IsUndefined(isolate)) return;
        Handle<AsyncGeneratorRequest> async_generator_request(
            AsyncGeneratorRequest::cast(async_generator_object->queue()),
            isolate);
        promise = handle(JSPromise::cast(async_generator_request->promise()),
                         isolate);
      }
    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                                 Builtins::kPromiseAllResolveElementClosure)) {
      Handle<JSFunction> handler_function(
          JSFunction::cast(reaction->fulfill_handler()), isolate);
      Handle<Context> resolve_element_context(handler_function->context(),
                                              isolate);

      // For promise resolve element callbacks the promise offset is
      // stashed (off by one) in the function's identity hash field.
      int const offset =
          Smi::ToInt(Smi::cast(handler_function->GetIdentityHash())) - 1;
      builder->AppendPromiseAllFrame(resolve_element_context, offset);

      // Peek into the Promise.all() resolve element context to find the
      // promise capability that is resolved once all of the concurrent
      // promises have resolved.
      int const capability_slot =
          PromiseBuiltins::kPromiseAllResolveElementCapabilitySlot;
      Handle<PromiseCapability> promise_capability(
          PromiseCapability::cast(
              resolve_element_context->get(capability_slot)),
          isolate);
      if (!promise_capability->promise().IsJSPromise()) return;
      promise =
          handle(JSPromise::cast(promise_capability->promise()), isolate);
    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                                 Builtins::kPromiseCapabilityDefaultResolve)) {
      Handle<JSFunction> handler_function(
          JSFunction::cast(reaction->fulfill_handler()), isolate);
      Handle<Context> handler_context(handler_function->context(), isolate);
      promise = handle(
          JSPromise::cast(handler_context->get(PromiseBuiltins::kPromiseSlot)),
          isolate);
    } else {
      // Generic promise chain: try to continue with the chained promise
      // on the reaction (this only works for native promise chains).
      Handle<HeapObject> promise_or_capability(
          reaction->promise_or_capability(), isolate);
      if (promise_or_capability->IsJSPromise()) {
        promise = Handle<JSPromise>::cast(promise_or_capability);
      } else if (promise_or_capability->IsPromiseCapability()) {
        Handle<PromiseCapability> capability =
            Handle<PromiseCapability>::cast(promise_or_capability);
        if (!capability->promise().IsJSPromise()) return;
        promise = handle(JSPromise::cast(capability->promise()), isolate);
      } else {
        // Otherwise {promise_or_capability} must be undefined here.
        CHECK(promise_or_capability->IsUndefined(isolate));
        return;
      }
    }
  }
}

976
namespace {
977

978
struct CaptureStackTraceOptions {
jgruber's avatar
jgruber committed
979
  int limit;
980 981 982 983 984 985 986 987 988
  // 'filter_mode' and 'skip_mode' are somewhat orthogonal. 'filter_mode'
  // specifies whether to capture all frames, or just frames in the same
  // security context. While 'skip_mode' allows skipping the first frame.
  FrameSkipMode skip_mode;
  FrameArrayBuilder::FrameFilterMode filter_mode;

  bool capture_builtin_exit_frames;
  bool capture_only_frames_subject_to_debugging;
  bool async_stack_trace;
989
  bool enable_frame_caching;
990 991 992 993 994
};

// Collects the current stack into a FrameArray according to {options} and,
// when --async-stack-traces is enabled, extends it with async frames
// derived from the current microtask.
Handle<Object> CaptureStackTrace(Isolate* isolate, Handle<Object> caller,
                                 CaptureStackTraceOptions options) {
  DisallowJavascriptExecution no_js(isolate);

  wasm::WasmCodeRefScope code_ref_scope;
  FrameArrayBuilder builder(isolate, options.skip_mode, options.limit, caller,
                            options.filter_mode);

  // First, build the regular (synchronous) portion of the stack trace.
  for (StackFrameIterator iterator(isolate);
       !iterator.done() && !builder.full(); iterator.Advance()) {
    StackFrame* const frame = iterator.frame();
    switch (frame->type()) {
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION:
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BUILTIN:
      case StackFrame::WASM_COMPILED:
      case StackFrame::WASM_INTERPRETER_ENTRY: {
        // A standard frame may summarize to several frames when inlining
        // happened; walk the summaries from the innermost outwards.
        std::vector<FrameSummary> summaries;
        StandardFrame::cast(frame)->Summarize(&summaries);
        for (size_t index = summaries.size();
             index > 0 && !builder.full();) {
          auto& summary = summaries[--index];
          if (options.capture_only_frames_subject_to_debugging &&
              !summary.is_subject_to_debugging()) {
            continue;
          }

          if (summary.IsJavaScript()) {
            // A JavaScript frame.
            auto const& js_summary = summary.AsJavaScript();
            builder.AppendJavaScriptFrame(js_summary);
          } else if (summary.IsWasmCompiled()) {
            // A WASM compiled frame.
            auto const& wasm_compiled_summary = summary.AsWasmCompiled();
            builder.AppendWasmCompiledFrame(wasm_compiled_summary);
          } else if (summary.IsWasmInterpreted()) {
            // A WASM interpreted frame.
            auto const& wasm_interpreted_summary = summary.AsWasmInterpreted();
            builder.AppendWasmInterpretedFrame(wasm_interpreted_summary);
          }
        }
        break;
      }

      case StackFrame::BUILTIN_EXIT:
        if (!options.capture_builtin_exit_frames) continue;

        // BuiltinExitFrames are not standard frames, so they do not have
        // Summarize(); they may still carry one JS frame worth showing.
        builder.AppendBuiltinExitFrame(BuiltinExitFrame::cast(frame));
        break;

      default:
        break;
    }
  }

  // If --async-stack-traces is enabled and the "current microtask" is a
  // PromiseReactionJobTask, try to enrich the trace with async frames.
  if (options.async_stack_trace) {
    Handle<Object> current_microtask = isolate->factory()->current_microtask();
    if (current_microtask->IsPromiseReactionJobTask()) {
      Handle<PromiseReactionJobTask> reaction_job =
          Handle<PromiseReactionJobTask>::cast(current_microtask);
      // Does the job use one of the known async function or async
      // generator continuations as its handler?
      if (IsBuiltinFunction(isolate, reaction_job->handler(),
                            Builtins::kAsyncFunctionAwaitResolveClosure) ||
          IsBuiltinFunction(isolate, reaction_job->handler(),
                            Builtins::kAsyncGeneratorAwaitResolveClosure) ||
          IsBuiltinFunction(isolate, reaction_job->handler(),
                            Builtins::kAsyncGeneratorYieldResolveClosure)) {
        // Peek into the handler's AwaitContext to reach the
        // JSGeneratorObject for the async function.
        Handle<Context> await_context(
            JSFunction::cast(reaction_job->handler()).context(), isolate);
        Handle<JSGeneratorObject> generator_object(
            JSGeneratorObject::cast(await_context->extension()), isolate);
        if (generator_object->is_executing()) {
          if (generator_object->IsJSAsyncFunctionObject()) {
            Handle<JSAsyncFunctionObject> async_function_object =
                Handle<JSAsyncFunctionObject>::cast(generator_object);
            Handle<JSPromise> promise(async_function_object->promise(),
                                      isolate);
            CaptureAsyncStackTrace(isolate, promise, &builder);
          } else {
            Handle<JSAsyncGeneratorObject> async_generator_object =
                Handle<JSAsyncGeneratorObject>::cast(generator_object);
            Handle<AsyncGeneratorRequest> async_generator_request(
                AsyncGeneratorRequest::cast(async_generator_object->queue()),
                isolate);
            Handle<JSPromise> promise(
                JSPromise::cast(async_generator_request->promise()), isolate);
            CaptureAsyncStackTrace(isolate, promise, &builder);
          }
        }
      } else {
        // The job does not belong to an await (or a yield inside an async
        // generator), but we might still find an async frame by following
        // the chain of promises hanging off the job.
        Handle<HeapObject> promise_or_capability(
            reaction_job->promise_or_capability(), isolate);
        if (promise_or_capability->IsJSPromise()) {
          Handle<JSPromise> promise =
              Handle<JSPromise>::cast(promise_or_capability);
          CaptureAsyncStackTrace(isolate, promise, &builder);
        }
      }
    }
  }

  // TODO(yangguo): Queue this structured stack trace for preprocessing on GC.
  return builder.GetElementsAsStackTraceFrameArray(
      options.enable_frame_caching);
}

}  // namespace

// Captures the stack for Error.stack-style formatting, honoring the
// script-visible Error.stackTraceLimit. Returns undefined when no limit
// can be determined.
Handle<Object> Isolate::CaptureSimpleStackTrace(Handle<JSReceiver> error_object,
                                                FrameSkipMode mode,
                                                Handle<Object> caller) {
  int stack_trace_limit;
  if (!GetStackTraceLimit(this, &stack_trace_limit)) {
    return factory()->undefined_value();
  }

  CaptureStackTraceOptions options;
  options.limit = stack_trace_limit;
  options.skip_mode = mode;
  options.filter_mode = FrameArrayBuilder::CURRENT_SECURITY_CONTEXT;
  options.capture_builtin_exit_frames = true;
  options.capture_only_frames_subject_to_debugging = false;
  options.async_stack_trace = FLAG_async_stack_traces;
  options.enable_frame_caching = false;

  return CaptureStackTrace(this, caller, options);
}

1143 1144
// Attaches a detailed stack trace (under a private symbol) to
// {error_object} when capturing for uncaught exceptions is enabled.
MaybeHandle<JSReceiver> Isolate::CaptureAndSetDetailedStackTrace(
    Handle<JSReceiver> error_object) {
  // Nothing to do unless the embedder asked for uncaught-exception traces.
  if (!capture_stack_trace_for_uncaught_exceptions_) return error_object;

  // Capture stack trace for a detailed exception message.
  Handle<Name> key = factory()->detailed_stack_trace_symbol();
  Handle<FixedArray> stack_trace = CaptureCurrentStackTrace(
      stack_trace_for_uncaught_exceptions_frame_limit_,
      stack_trace_for_uncaught_exceptions_options_);
  RETURN_ON_EXCEPTION(
      this,
      Object::SetProperty(this, error_object, key, stack_trace,
                          StoreOrigin::kMaybeKeyed,
                          Just(ShouldThrow::kThrowOnError)),
      JSReceiver);
  return error_object;
}

1161
// Attaches a simple stack trace (under a private symbol) to {error_object}
// for later Error.stack string formatting.
MaybeHandle<JSReceiver> Isolate::CaptureAndSetSimpleStackTrace(
    Handle<JSReceiver> error_object, FrameSkipMode mode,
    Handle<Object> caller) {
  Handle<Name> stack_trace_key = factory()->stack_trace_symbol();
  Handle<Object> captured_trace =
      CaptureSimpleStackTrace(error_object, mode, caller);
  RETURN_ON_EXCEPTION(
      this,
      Object::SetProperty(this, error_object, stack_trace_key, captured_trace,
                          StoreOrigin::kMaybeKeyed,
                          Just(ShouldThrow::kThrowOnError)),
      JSReceiver);
  return error_object;
}

1176 1177
// Retrieves the detailed stack trace previously stored on {error_object},
// or an empty handle when none is present.
Handle<FixedArray> Isolate::GetDetailedStackTrace(
    Handle<JSObject> error_object) {
  Handle<Name> detailed_key = factory()->detailed_stack_trace_symbol();
  Handle<Object> property =
      JSReceiver::GetDataProperty(error_object, detailed_key);
  if (!property->IsFixedArray()) return Handle<FixedArray>();
  return Handle<FixedArray>::cast(property);
}

1185 1186 1187
// Returns an "abstract" program counter for the topmost JavaScript frame
// (bytecode address for interpreted frames, machine pc otherwise) and
// fills in 1-based {line}/{column}; both are set to -1 when unavailable.
Address Isolate::GetAbstractPC(int* line, int* column) {
  JavaScriptFrameIterator frame_iterator(this);

  // No JavaScript frame on the stack.
  if (frame_iterator.done()) {
    *line = -1;
    *column = -1;
    return kNullAddress;
  }
  JavaScriptFrame* frame = frame_iterator.frame();
  DCHECK(!frame->is_builtin());

  Handle<SharedFunctionInfo> shared = handle(frame->function().shared(), this);
  SharedFunctionInfo::EnsureSourcePositionsAvailable(this, shared);
  int position = frame->position();

  Object maybe_script = frame->function().shared().script();
  if (maybe_script.IsScript()) {
    // Translate the source position into 1-based line/column numbers.
    Handle<Script> script(Script::cast(maybe_script), this);
    Script::PositionInfo position_info;
    Script::GetPositionInfo(script, position, &position_info,
                            Script::WITH_OFFSET);
    *line = position_info.line + 1;
    *column = position_info.column + 1;
  } else {
    // Without a script, report the raw position as the line.
    *line = position;
    *column = -1;
  }

  if (frame->is_interpreted()) {
    // For interpreted frames, return the address of the current bytecode.
    InterpretedFrame* iframe = static_cast<InterpretedFrame*>(frame);
    Address bytecode_start =
        iframe->GetBytecodeArray().GetFirstBytecodeAddress();
    return bytecode_start + iframe->GetBytecodeOffset();
  }

  return frame->pc();
}
1221

1222
// Captures the current stack for the v8::StackTrace embedder API, honoring
// the given {frame_limit} and {stack_trace_options}.
Handle<FixedArray> Isolate::CaptureCurrentStackTrace(
    int frame_limit, StackTrace::StackTraceOptions stack_trace_options) {
  CaptureStackTraceOptions options;
  options.limit = Max(frame_limit, 0);  // Ensure no negative values.
  options.skip_mode = SKIP_NONE;
  options.capture_builtin_exit_frames = false;
  options.async_stack_trace = false;
  // Only expose frames from other security origins when explicitly asked.
  if (stack_trace_options & StackTrace::kExposeFramesAcrossSecurityOrigins) {
    options.filter_mode = FrameArrayBuilder::ALL;
  } else {
    options.filter_mode = FrameArrayBuilder::CURRENT_SECURITY_CONTEXT;
  }
  options.capture_only_frames_subject_to_debugging = true;
  options.enable_frame_caching = true;

  return Handle<FixedArray>::cast(
      CaptureStackTrace(this, factory()->undefined_value(), options));
}

1240
// Prints the current stack to {out}. Guards against re-entrancy: a second
// nested call reports a double fault and dumps whatever was accumulated.
void Isolate::PrintStack(FILE* out, PrintStackMode mode) {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    StringStream::ClearMentionedObjectCache(this);
    HeapStringAllocator allocator;
    StringStream stream(&allocator);
    // Keep a pointer to the partial output so a nested fault can flush it.
    incomplete_message_ = &stream;
    PrintStack(&stream, mode);
    stream.OutputToFile(out);
    InitializeLoggingAndCounters();
    stream.Log(this);
    incomplete_message_ = nullptr;
    stack_trace_nesting_level_ = 0;
  } else if (stack_trace_nesting_level_ == 1) {
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToFile(out);
  }
}

1263
static void PrintFrames(Isolate* isolate, StringStream* accumulator,
1264
                        StackFrame::PrintMode mode) {
1265
  StackFrameIterator it(isolate);
1266 1267 1268 1269 1270
  for (int i = 0; !it.done(); it.Advance()) {
    it.frame()->Print(accumulator, mode, i++);
  }
}

1271
// Writes an overview of the JS stack (and, in verbose mode, frame details)
// into {accumulator}.
void Isolate::PrintStack(StringStream* accumulator, PrintStackMode mode) {
  HandleScope handle_scope(this);
  wasm::WasmCodeRefScope wasm_code_ref_scope;
  DCHECK(accumulator->IsMentionedObjectCacheClear(this));

  // Avoid printing anything if there are no frames.
  if (c_entry_fp(thread_local_top()) == 0) return;

  accumulator->Add(
      "\n==== JS stack trace =========================================\n\n");
  PrintFrames(this, accumulator, StackFrame::OVERVIEW);
  if (mode == kPrintStackVerbose) {
    // Verbose mode additionally prints per-frame details and the objects
    // mentioned while printing.
    accumulator->Add(
        "\n==== Details ================================================\n\n");
    PrintFrames(this, accumulator, StackFrame::DETAILS);
    accumulator->PrintMentionedObjectCache(this);
  }
  accumulator->Add("=====================\n\n");
}

// Stores the embedder-provided callback on the current thread's state;
// ReportFailedAccessCheck invokes it instead of throwing when set.
void Isolate::SetFailedAccessCheckCallback(
    v8::FailedAccessCheckCallback callback) {
  thread_local_top()->failed_access_check_callback_ = callback;
}

1296
// Reports a failed access check on {receiver}: schedules a TypeError when
// no embedder callback is registered, otherwise invokes the callback with
// the data from the receiver's AccessCheckInfo.
void Isolate::ReportFailedAccessCheck(Handle<JSObject> receiver) {
  if (!thread_local_top()->failed_access_check_callback_) {
    return ScheduleThrow(*factory()->NewTypeError(MessageTemplate::kNoAccess));
  }

  DCHECK(receiver->IsAccessCheckNeeded());
  DCHECK(!context().is_null());

  // Extract the data object from the receiver's access check info.
  HandleScope scope(this);
  Handle<Object> callback_data;
  {
    DisallowHeapAllocation no_gc;
    AccessCheckInfo access_check_info = AccessCheckInfo::Get(this, receiver);
    if (access_check_info.is_null()) {
      // No access check info: fall back to throwing.
      AllowHeapAllocation doesnt_matter_anymore;
      return ScheduleThrow(
          *factory()->NewTypeError(MessageTemplate::kNoAccess));
    }
    callback_data = handle(access_check_info.data(), this);
  }

  // Leaving JavaScript.
  VMState<EXTERNAL> state(this);
  thread_local_top()->failed_access_check_callback_(
      v8::Utils::ToLocal(receiver), v8::ACCESS_HAS,
      v8::Utils::ToLocal(callback_data));
}

1324 1325
// Checks whether code running in {accessing_context} may access {receiver},
// first via security-token comparison and then, if necessary, by asking the
// embedder's access check callback.
bool Isolate::MayAccess(Handle<Context> accessing_context,
                        Handle<JSObject> receiver) {
  DCHECK(receiver->IsJSGlobalProxy() || receiver->IsAccessCheckNeeded());

  // Check for compatibility between the security tokens in the
  // current lexical context and the accessed object.

  // During bootstrapping, callback functions are not enabled yet.
  if (bootstrapper()->IsActive()) return true;
  {
    DisallowHeapAllocation no_gc;

    if (receiver->IsJSGlobalProxy()) {
      Object receiver_context = JSGlobalProxy::cast(*receiver).native_context();
      if (!receiver_context.IsContext()) return false;

      // Get the native context of current top context.
      // avoid using Isolate::native_context() because it uses Handle.
      Context accessing_native_context =
          accessing_context->global_object().native_context();
      // Same native context: access is always allowed.
      if (receiver_context == accessing_native_context) return true;

      // Different contexts with matching security tokens also pass.
      if (Context::cast(receiver_context).security_token() ==
          accessing_native_context.security_token())
        return true;
    }
  }

  // Token comparison was inconclusive; consult the embedder's callback.
  HandleScope scope(this);
  Handle<Object> callback_data;
  v8::AccessCheckCallback access_callback = nullptr;
  {
    DisallowHeapAllocation no_gc;
    AccessCheckInfo access_check_info = AccessCheckInfo::Get(this, receiver);
    if (access_check_info.is_null()) return false;
    Object fun_obj = access_check_info.callback();
    access_callback = v8::ToCData<v8::AccessCheckCallback>(fun_obj);
    callback_data = handle(access_check_info.data(), this);
  }

  LOG(this, ApiSecurityCheck());

  {
    // Leaving JavaScript.
    VMState<EXTERNAL> state(this);
    return access_callback(v8::Utils::ToLocal(accessing_context),
                           v8::Utils::ToLocal(receiver),
                           v8::Utils::ToLocal(callback_data));
  }
}

1374
// Constructs and throws a RangeError for stack overflow; returns the
// exception sentinel.
Object Isolate::StackOverflow() {
  // Stack overflows are not deterministic enough for fuzzing comparisons.
  if (FLAG_correctness_fuzzer_suppressions) {
    FATAL("Aborting on stack overflow");
  }

  DisallowJavascriptExecution no_js(this);
  HandleScope scope(this);

  Handle<JSFunction> error_constructor = range_error_function();
  Handle<Object> message = factory()->NewStringFromAsciiChecked(
      MessageFormatter::TemplateString(MessageTemplate::kStackOverflow));
  Handle<Object> no_caller;
  Handle<Object> exception;
  ASSIGN_RETURN_FAILURE_ON_EXCEPTION(
      this, exception,
      ErrorUtils::Construct(this, error_constructor, error_constructor,
                            message, SKIP_NONE, no_caller,
                            ErrorUtils::StackTraceCollection::kSimple));

  Throw(*exception, nullptr);

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap && FLAG_stress_compaction) {
    heap()->CollectAllGarbage(Heap::kNoGCFlags,
                              GarbageCollectionReason::kTesting);
  }
#endif  // VERIFY_HEAP

  return ReadOnlyRoots(heap()).exception();
}

1404
// Throws the special termination exception, which JavaScript cannot catch.
Object Isolate::TerminateExecution() {
  Object termination = ReadOnlyRoots(this).termination_exception();
  return Throw(termination, nullptr);
}

1408 1409 1410 1411 1412
void Isolate::CancelTerminateExecution() {
  if (try_catch_handler()) {
    try_catch_handler()->has_terminated_ = false;
  }
  if (has_pending_exception() &&
1413
      pending_exception() == ReadOnlyRoots(this).termination_exception()) {
1414 1415 1416 1417
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
  if (has_scheduled_exception() &&
1418
      scheduled_exception() == ReadOnlyRoots(this).termination_exception()) {
1419 1420 1421 1422 1423
    thread_local_top()->external_caught_exception_ = false;
    clear_scheduled_exception();
  }
}

1424 1425 1426 1427 1428
// Queues an API interrupt ({callback}, {data}) and arms the stack guard so
// it gets serviced soon.
void Isolate::RequestInterrupt(InterruptCallback callback, void* data) {
  ExecutionAccess lock(this);
  api_interrupts_queue_.push(InterruptEntry(callback, data));
  stack_guard()->RequestApiInterrupt();
}
1429

1430
void Isolate::InvokeApiInterruptCallbacks() {
1431
  RuntimeCallTimerScope runtimeTimer(
1432
      this, RuntimeCallCounterId::kInvokeApiInterruptCallbacks);
1433 1434 1435 1436 1437 1438 1439 1440 1441
  // Note: callback below should be called outside of execution access lock.
  while (true) {
    InterruptEntry entry;
    {
      ExecutionAccess access(this);
      if (api_interrupts_queue_.empty()) return;
      entry = api_interrupts_queue_.front();
      api_interrupts_queue_.pop();
    }
1442 1443
    VMState<EXTERNAL> state(this);
    HandleScope handle_scope(this);
1444
    entry.first(reinterpret_cast<v8::Isolate*>(this), entry.second);
1445 1446 1447
  }
}

1448 1449
// Prints diagnostics for an exception thrown while bootstrapping (e.g. from
// an extension or an internal error), including the failing line number and
// (with OBJECT_PRINT) the failing script's source.
void ReportBootstrappingException(Handle<Object> exception,
                                  MessageLocation* location) {
  base::OS::PrintError("Exception thrown during bootstrapping\n");
  if (location == nullptr || location->script().is_null()) return;
  // We have a location with a script attached, so we can report the line
  // number at which the error occurred for easier debugging.
  int line_number =
      location->script()->GetLineNumber(location->start_pos()) + 1;
  if (exception->IsString() && location->script()->name().IsString()) {
    base::OS::PrintError(
        "Extension or internal compilation error: %s in %s at line %d.\n",
        String::cast(*exception).ToCString().get(),
        String::cast(location->script()->name()).ToCString().get(),
        line_number);
  } else if (location->script()->name().IsString()) {
    base::OS::PrintError(
        "Extension or internal compilation error in %s at line %d.\n",
        String::cast(location->script()->name()).ToCString().get(),
        line_number);
  } else if (exception->IsString()) {
    base::OS::PrintError("Extension or internal compilation error: %s.\n",
                         String::cast(*exception).ToCString().get());
  } else {
    base::OS::PrintError("Extension or internal compilation error.\n");
  }
#ifdef OBJECT_PRINT
  // Since comments and empty lines have been stripped from the source of
  // builtins, print the actual source here so that line numbers match.
  if (location->script()->source().IsString()) {
    Handle<String> src(String::cast(location->script()->source()),
                       location->script()->GetIsolate());
    PrintF("Failing script:");
    int len = src->length();
    if (len == 0) {
      PrintF(" <not available>\n");
    } else {
      PrintF("\n");
      // Print the source with a running line-number prefix.
      int current_line = 1;
      PrintF("%5d: ", current_line);
      for (int i = 0; i < len; i++) {
        uint16_t character = src->Get(i);
        PrintF("%c", character);
        if (character == '\n' && i < len - 2) {
          PrintF("%5d: ", ++current_line);
        }
      }
      PrintF("\n");
    }
  }
#endif
}

// Throws {raw_exception} in this isolate. Notifies the debugger, creates a
// pending message when required (see criteria below), optionally aborts the
// process under --abort-on-uncaught-exception, and finally installs the
// exception as the pending exception. {location} may be nullptr, in which
// case a location is computed from the current stack when a message is
// needed. Returns the sentinel exception value from the read-only roots.
Object Isolate::Throw(Object raw_exception, MessageLocation* location) {
  DCHECK(!has_pending_exception());

  HandleScope scope(this);
  // Handlify the exception: message creation below may allocate and trigger
  // GC, which can move the underlying object.
  Handle<Object> exception(raw_exception, this);

  if (FLAG_print_all_exceptions) {
    printf("=========================================================\n");
    printf("Exception thrown:\n");
    if (location) {
      Handle<Script> script = location->script();
      Handle<Object> name(script->GetNameOrSourceURL(), this);
      printf("at ");
      if (name->IsString() && String::cast(*name).length() > 0)
        String::cast(*name).PrintOn(stdout);
      else
        printf("<anonymous>");
// Script::GetLineNumber and Script::GetColumnNumber can allocate on the heap to
// initialize the line_ends array, so be careful when calling them.
#ifdef DEBUG
      if (AllowHeapAllocation::IsAllowed()) {
#else
      if ((false)) {
#endif
        printf(", %d:%d - %d:%d\n",
               Script::GetLineNumber(script, location->start_pos()) + 1,
               Script::GetColumnNumber(script, location->start_pos()),
               Script::GetLineNumber(script, location->end_pos()) + 1,
               Script::GetColumnNumber(script, location->end_pos()));
        // Make sure to update the raw exception pointer in case it moved.
        raw_exception = *exception;
      } else {
        printf(", line %d\n", script->GetLineNumber(location->start_pos()) + 1);
      }
    }
    raw_exception.Print();
    printf("Stack Trace:\n");
    PrintStack(stdout);
    printf("=========================================================\n");
  }

  // Determine whether a message needs to be created for the given exception
  // depending on the following criteria:
  // 1) External v8::TryCatch missing: Always create a message because any
  //    JavaScript handler for a finally-block might re-throw to top-level.
  // 2) External v8::TryCatch exists: Only create a message if the handler
  //    captures messages or is verbose (which reports despite the catch).
  // 3) ReThrow from v8::TryCatch: The message from a previous throw still
  //    exists and we preserve it instead of creating a new message.
  bool requires_message = try_catch_handler() == nullptr ||
                          try_catch_handler()->is_verbose_ ||
                          try_catch_handler()->capture_message_;
  bool rethrowing_message = thread_local_top()->rethrowing_message_;

  thread_local_top()->rethrowing_message_ = false;

  // Notify debugger of exception.
  if (is_catchable_by_javascript(raw_exception)) {
    debug()->OnThrow(exception);
  }

  // Generate the message if required.
  if (requires_message && !rethrowing_message) {
    MessageLocation computed_location;
    // If no location was specified we try to use a computed one instead.
    if (location == nullptr && ComputeLocation(&computed_location)) {
      location = &computed_location;
    }

    if (bootstrapper()->IsActive()) {
      // It's not safe to try to make message objects or collect stack traces
      // while the bootstrapper is active since the infrastructure may not have
      // been properly initialized.
      ReportBootstrappingException(exception, location);
    } else {
      Handle<Object> message_obj = CreateMessage(exception, location);
      thread_local_top()->pending_message_obj_ = *message_obj;

      // For any exception not caught by JavaScript, even when an external
      // handler is present:
      // If the abort-on-uncaught-exception flag is specified, and if the
      // embedder didn't specify a custom uncaught exception callback,
      // or if the custom callback determined that V8 should abort, then
      // abort.
      if (FLAG_abort_on_uncaught_exception) {
        CatchType prediction = PredictExceptionCatcher();
        if ((prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) &&
            (!abort_on_uncaught_exception_callback_ ||
             abort_on_uncaught_exception_callback_(
                 reinterpret_cast<v8::Isolate*>(this)))) {
          // Prevent endless recursion.
          FLAG_abort_on_uncaught_exception = false;
          // This flag is intended for use by JavaScript developers, so
          // print a user-friendly stack trace (not an internal one).
          PrintF(stderr, "%s\n\nFROM\n",
                 MessageHandler::GetLocalizedMessage(this, message_obj).get());
          PrintCurrentStackTrace(stderr);
          base::OS::Abort();
        }
      }
    }
  }

  // Set the exception being thrown.
  set_pending_exception(*exception);
  return ReadOnlyRoots(heap()).exception();
}

1611
Object Isolate::ReThrow(Object exception) {
1612
  DCHECK(!has_pending_exception());
1613 1614 1615

  // Set the exception being re-thrown.
  set_pending_exception(exception);
1616
  return ReadOnlyRoots(heap()).exception();
1617 1618
}

// Unwinds the stack until a frame that can handle the current pending
// exception is found. Records the handler's entry point, stack pointer,
// frame pointer, constant pool and context in the thread-local top (to be
// consumed by CEntry), clears the pending-exception slot, and returns the
// exception so generated code can carry it in the return register.
Object Isolate::UnwindAndFindHandler() {
  Object exception = pending_exception();

  auto FoundHandler = [&](Context context, Address instruction_start,
                          intptr_t handler_offset,
                          Address constant_pool_address, Address handler_sp,
                          Address handler_fp) {
    // Store information to be consumed by the CEntry.
    thread_local_top()->pending_handler_context_ = context;
    thread_local_top()->pending_handler_entrypoint_ =
        instruction_start + handler_offset;
    thread_local_top()->pending_handler_constant_pool_ = constant_pool_address;
    thread_local_top()->pending_handler_fp_ = handler_fp;
    thread_local_top()->pending_handler_sp_ = handler_sp;

    // Return and clear pending exception. The contract is that:
    // (1) the pending exception is stored in one place (no duplication), and
    // (2) within generated-code land, that one place is the return register.
    // If/when we unwind back into C++ (returning to the JSEntry stub,
    // or to Execution::CallWasm), the returned exception will be sent
    // back to isolate->set_pending_exception(...).
    clear_pending_exception();
    return exception;
  };

  // Special handling of termination exceptions, uncatchable by JavaScript and
  // Wasm code, we unwind the handlers until the top ENTRY handler is found.
  bool catchable_by_js = is_catchable_by_javascript(exception);

  // Compute handler and stack unwinding information by performing a full walk
  // over the stack and dispatching according to the frame type.
  for (StackFrameIterator iter(this);; iter.Advance()) {
    // Handler must exist.
    DCHECK(!iter.done());

    StackFrame* frame = iter.frame();

    switch (frame->type()) {
      case StackFrame::ENTRY:
      case StackFrame::CONSTRUCT_ENTRY: {
        // For JSEntry frames we always have a handler.
        StackHandler* handler = frame->top_handler();

        // Restore the next handler.
        thread_local_top()->handler_ = handler->next_address();

        // Gather information from the handler.
        Code code = frame->LookupCode();
        HandlerTable table(code);
        return FoundHandler(Context(), code.InstructionStart(),
                            table.LookupReturn(0), code.constant_pool(),
                            handler->address() + StackHandlerConstants::kSize,
                            0);
      }

      case StackFrame::C_WASM_ENTRY: {
        // Entry from C++ into Wasm: behaves like a JSEntry frame, but the
        // handler offset is looked up by the return offset within the code.
        StackHandler* handler = frame->top_handler();
        thread_local_top()->handler_ = handler->next_address();
        Code code = frame->LookupCode();
        HandlerTable table(code);
        Address instruction_start = code.InstructionStart();
        int return_offset = static_cast<int>(frame->pc() - instruction_start);
        int handler_offset = table.LookupReturn(return_offset);
        DCHECK_NE(-1, handler_offset);
        return FoundHandler(Context(), instruction_start, handler_offset,
                            code.constant_pool(), frame->sp(), frame->fp());
      }

      case StackFrame::WASM_COMPILED: {
        // Leaving Wasm code: clear the thread-in-wasm trap-handler flag while
        // we consider unwinding past this frame.
        if (trap_handler::IsThreadInWasm()) {
          trap_handler::ClearThreadInWasm();
        }

        // For WebAssembly frames we perform a lookup in the handler table.
        if (!catchable_by_js) break;
        // This code ref scope is here to avoid a check failure when looking up
        // the code. It's not actually necessary to keep the code alive as it's
        // currently being executed.
        wasm::WasmCodeRefScope code_ref_scope;
        WasmCompiledFrame* wasm_frame = static_cast<WasmCompiledFrame*>(frame);
        int stack_slots = 0;  // Will contain stack slot count of frame.
        int offset = wasm_frame->LookupExceptionHandlerInTable(&stack_slots);
        if (offset < 0) break;
        // Compute the stack pointer from the frame pointer. This ensures that
        // argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            stack_slots * kSystemPointerSize;

        // This is going to be handled by Wasm, so we need to set the TLS flag
        // again. It was cleared above assuming the frame would be unwound.
        trap_handler::SetThreadInWasm();

        // Gather information from the frame.
        wasm::WasmCode* wasm_code =
            wasm_engine()->code_manager()->LookupCode(frame->pc());
        return FoundHandler(Context(), wasm_code->instruction_start(), offset,
                            wasm_code->constant_pool(), return_sp, frame->fp());
      }

      case StackFrame::WASM_COMPILE_LAZY: {
        // Can only fail directly on invocation. This happens if an invalid
        // function was validated lazily.
        DCHECK_IMPLIES(trap_handler::IsTrapHandlerEnabled(),
                       trap_handler::IsThreadInWasm());
        DCHECK(FLAG_wasm_lazy_validation);
        trap_handler::ClearThreadInWasm();
        break;
      }

      case StackFrame::OPTIMIZED: {
        // For optimized frames we perform a lookup in the handler table.
        if (!catchable_by_js) break;
        OptimizedFrame* js_frame = static_cast<OptimizedFrame*>(frame);
        int stack_slots = 0;  // Will contain stack slot count of frame.
        int offset =
            js_frame->LookupExceptionHandlerInTable(&stack_slots, nullptr);
        if (offset < 0) break;
        // Compute the stack pointer from the frame pointer. This ensures
        // that argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            stack_slots * kSystemPointerSize;

        // Gather information from the frame.
        Code code = frame->LookupCode();

        // TODO(bmeurer): Turbofanned BUILTIN frames appear as OPTIMIZED,
        // but do not have a code kind of OPTIMIZED_FUNCTION.
        if (code.kind() == Code::OPTIMIZED_FUNCTION &&
            code.marked_for_deoptimization()) {
          // If the target code is lazy deoptimized, we jump to the original
          // return address, but we make a note that we are throwing, so
          // that the deoptimizer can do the right thing.
          offset = static_cast<int>(frame->pc() - code.entry());
          set_deoptimizer_lazy_throw(true);
        }

        return FoundHandler(Context(), code.InstructionStart(), offset,
                            code.constant_pool(), return_sp, frame->fp());
      }

      case StackFrame::STUB: {
        // Some stubs are able to handle exceptions.
        if (!catchable_by_js) break;
        StubFrame* stub_frame = static_cast<StubFrame*>(frame);
        wasm::WasmCodeRefScope code_ref_scope;
        wasm::WasmCode* wasm_code =
            wasm_engine()->code_manager()->LookupCode(frame->pc());
        if (wasm_code != nullptr) {
          // It is safe to skip Wasm runtime stubs as none of them contain local
          // exception handlers.
          CHECK_EQ(wasm::WasmCode::kRuntimeStub, wasm_code->kind());
          CHECK_EQ(0, wasm_code->handler_table_size());
          break;
        }
        Code code = stub_frame->LookupCode();
        // Only turbofanned builtins with a handler table can catch here.
        if (!code.IsCode() || code.kind() != Code::BUILTIN ||
            !code.has_handler_table() || !code.is_turbofanned()) {
          break;
        }

        int stack_slots = 0;  // Will contain stack slot count of frame.
        int offset = stub_frame->LookupExceptionHandlerInTable(&stack_slots);
        if (offset < 0) break;

        // Compute the stack pointer from the frame pointer. This ensures
        // that argument slots on the stack are dropped as returning would.
        Address return_sp = frame->fp() +
                            StandardFrameConstants::kFixedFrameSizeAboveFp -
                            stack_slots * kSystemPointerSize;

        return FoundHandler(Context(), code.InstructionStart(), offset,
                            code.constant_pool(), return_sp, frame->fp());
      }

      case StackFrame::INTERPRETED: {
        // For interpreted frame we perform a range lookup in the handler table.
        if (!catchable_by_js) break;
        InterpretedFrame* js_frame = static_cast<InterpretedFrame*>(frame);
        int register_slots = InterpreterFrameConstants::RegisterStackSlotCount(
            js_frame->GetBytecodeArray().register_count());
        int context_reg = 0;  // Will contain register index holding context.
        int offset =
            js_frame->LookupExceptionHandlerInTable(&context_reg, nullptr);
        if (offset < 0) break;
        // Compute the stack pointer from the frame pointer. This ensures that
        // argument slots on the stack are dropped as returning would.
        // Note: This is only needed for interpreted frames that have been
        //       materialized by the deoptimizer. If there is a handler frame
        //       in between then {frame->sp()} would already be correct.
        Address return_sp = frame->fp() -
                            InterpreterFrameConstants::kFixedFrameSizeFromFp -
                            register_slots * kSystemPointerSize;

        // Patch the bytecode offset in the interpreted frame to reflect the
        // position of the exception handler. The special builtin below will
        // take care of continuing to dispatch at that position. Also restore
        // the correct context for the handler from the interpreter register.
        Context context =
            Context::cast(js_frame->ReadInterpreterRegister(context_reg));
        js_frame->PatchBytecodeOffset(static_cast<int>(offset));

        Code code =
            builtins()->builtin(Builtins::kInterpreterEnterBytecodeDispatch);
        return FoundHandler(context, code.InstructionStart(), 0,
                            code.constant_pool(), return_sp, frame->fp());
      }

      case StackFrame::BUILTIN:
        // For builtin frames we are guaranteed not to find a handler.
        if (catchable_by_js) {
          CHECK_EQ(-1,
                   JavaScriptFrame::cast(frame)->LookupExceptionHandlerInTable(
                       nullptr, nullptr));
        }
        break;

      case StackFrame::WASM_INTERPRETER_ENTRY: {
        // Unwinding past the Wasm interpreter entry leaves Wasm execution.
        if (trap_handler::IsThreadInWasm()) {
          trap_handler::ClearThreadInWasm();
        }
      } break;

      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
        // Builtin continuation frames with catch can handle exceptions.
        if (!catchable_by_js) break;
        JavaScriptBuiltinContinuationWithCatchFrame* js_frame =
            JavaScriptBuiltinContinuationWithCatchFrame::cast(frame);
        js_frame->SetException(exception);

        // Reconstruct the stack pointer from the frame pointer.
        Address return_sp = js_frame->fp() - js_frame->GetSPToFPDelta();
        Code code = js_frame->LookupCode();
        return FoundHandler(Context(), code.InstructionStart(), 0,
                            code.constant_pool(), return_sp, frame->fp());
      } break;

      default:
        // All other types can not handle exception.
        break;
    }

    if (frame->is_optimized()) {
      // Remove per-frame stored materialized objects.
      bool removed = materialized_object_store_->Remove(frame->fp());
      USE(removed);
      // If there were any materialized objects, the code should be
      // marked for deopt.
      DCHECK_IMPLIES(removed, frame->LookupCode().marked_for_deoptimization());
    }
  }

  UNREACHABLE();
}

namespace {
// Predicts how an exception thrown while {frame} is on top would be caught,
// using the handler tables of the frame's (unoptimized) code objects.
// Returns HandlerTable::UNCAUGHT when no catching handler is found.
HandlerTable::CatchPrediction PredictException(JavaScriptFrame* frame) {
  HandlerTable::CatchPrediction prediction;
  if (frame->is_optimized()) {
    if (frame->LookupExceptionHandlerInTable(nullptr, nullptr) > 0) {
      // This optimized frame will catch. Its handler table does not include
      // exception prediction, and we need to use the corresponding handler
      // tables on the unoptimized code objects.
      std::vector<FrameSummary> summaries;
      frame->Summarize(&summaries);
      // Walk the inlined frames from innermost to outermost.
      for (size_t i = summaries.size(); i != 0; i--) {
        const FrameSummary& summary = summaries[i - 1];
        Handle<AbstractCode> code = summary.AsJavaScript().abstract_code();
        if (code->IsCode() && code->kind() == AbstractCode::BUILTIN) {
          prediction = code->GetCode().GetBuiltinCatchPrediction();
          if (prediction == HandlerTable::UNCAUGHT) continue;
          return prediction;
        }

        // Must have been constructed from a bytecode array.
        CHECK_EQ(AbstractCode::INTERPRETED_FUNCTION, code->kind());
        int code_offset = summary.code_offset();
        HandlerTable table(code->GetBytecodeArray());
        int index = table.LookupRange(code_offset, nullptr, &prediction);
        if (index <= 0) continue;
        if (prediction == HandlerTable::UNCAUGHT) continue;
        return prediction;
      }
    }
  } else if (frame->LookupExceptionHandlerInTable(nullptr, &prediction) > 0) {
    return prediction;
  }
  return HandlerTable::UNCAUGHT;
}

// Translates a handler-table catch prediction into the Isolate-level
// CatchType enum.
Isolate::CatchType ToCatchType(HandlerTable::CatchPrediction prediction) {
  switch (prediction) {
    case HandlerTable::UNCAUGHT:
      return Isolate::NOT_CAUGHT;
    case HandlerTable::CAUGHT:
      return Isolate::CAUGHT_BY_JAVASCRIPT;
    case HandlerTable::PROMISE:
      return Isolate::CAUGHT_BY_PROMISE;
    case HandlerTable::DESUGARING:
      return Isolate::CAUGHT_BY_DESUGARING;
    case HandlerTable::ASYNC_AWAIT:
      return Isolate::CAUGHT_BY_ASYNC_AWAIT;
    default:
      UNREACHABLE();
  }
}
}  // anonymous namespace
// Walks the stack to predict who will catch an exception thrown right now:
// an external v8::TryCatch (CAUGHT_BY_EXTERNAL), JavaScript code (including
// promise/async-await machinery), or nobody (NOT_CAUGHT).
Isolate::CatchType Isolate::PredictExceptionCatcher() {
  Address external_handler = thread_local_top()->try_catch_handler_address();
  if (IsExternalHandlerOnTop(Object())) return CAUGHT_BY_EXTERNAL;

  // Search for an exception handler by performing a full walk over the stack.
  for (StackFrameIterator iter(this); !iter.done(); iter.Advance()) {
    StackFrame* frame = iter.frame();

    switch (frame->type()) {
      case StackFrame::ENTRY:
      case StackFrame::CONSTRUCT_ENTRY: {
        Address entry_handler = frame->top_handler()->next_address();
        // The exception has been externally caught if and only if there is an
        // external handler which is on top of the top-most JS_ENTRY handler.
        if (external_handler != kNullAddress &&
            !try_catch_handler()->is_verbose_) {
          if (entry_handler == kNullAddress ||
              entry_handler > external_handler) {
            return CAUGHT_BY_EXTERNAL;
          }
        }
      } break;

      // For JavaScript frames we perform a lookup in the handler table.
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BUILTIN: {
        JavaScriptFrame* js_frame = JavaScriptFrame::cast(frame);
        Isolate::CatchType prediction = ToCatchType(PredictException(js_frame));
        if (prediction == NOT_CAUGHT) break;
        return prediction;
      } break;

      case StackFrame::STUB: {
        // Only turbofanned builtins with a handler table can catch here.
        Handle<Code> code(frame->LookupCode(), this);
        if (!code->IsCode() || code->kind() != Code::BUILTIN ||
            !code->has_handler_table() || !code->is_turbofanned()) {
          break;
        }

        CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
        if (prediction != NOT_CAUGHT) return prediction;
      } break;

      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH: {
        Handle<Code> code(frame->LookupCode(), this);
        CatchType prediction = ToCatchType(code->GetBuiltinCatchPrediction());
        if (prediction != NOT_CAUGHT) return prediction;
      } break;

      default:
        // All other types can not handle exception.
        break;
    }
  }

  // Handler not found.
  return NOT_CAUGHT;
}

1988
Object Isolate::ThrowIllegalOperation() {
1989
  if (FLAG_stack_trace_on_illegal) PrintStack(stdout);
1990
  return Throw(ReadOnlyRoots(heap()).illegal_access_string());
1991 1992
}

1993
void Isolate::ScheduleThrow(Object exception) {
1994 1995 1996
  // When scheduling a throw we first throw the exception to get the
  // error reporting if it is uncaught before rescheduling it.
  Throw(exception);
1997
  PropagatePendingExceptionToExternalTryCatch();
1998 1999 2000 2001 2002
  if (has_pending_exception()) {
    thread_local_top()->scheduled_exception_ = pending_exception();
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
  }
2003 2004
}

2005
void Isolate::RestorePendingMessageFromTryCatch(v8::TryCatch* handler) {
2006 2007 2008 2009
  DCHECK(handler == try_catch_handler());
  DCHECK(handler->HasCaught());
  DCHECK(handler->rethrow_);
  DCHECK(handler->capture_message_);
2010
  Object message(reinterpret_cast<Address>(handler->message_obj_));
2011
  DCHECK(message.IsJSMessageObject() || message.IsTheHole(this));
2012 2013 2014
  thread_local_top()->pending_message_obj_ = message;
}

2015
void Isolate::CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler) {
2016
  DCHECK(has_scheduled_exception());
2017 2018
  if (reinterpret_cast<void*>(scheduled_exception().ptr()) ==
      handler->exception_) {
2019 2020
    DCHECK_NE(scheduled_exception(),
              ReadOnlyRoots(heap()).termination_exception());
2021
    clear_scheduled_exception();
2022 2023 2024 2025 2026 2027 2028 2029
  } else {
    DCHECK_EQ(scheduled_exception(),
              ReadOnlyRoots(heap()).termination_exception());
    // Clear termination once we returned from all V8 frames.
    if (handle_scope_implementer()->CallDepthIsZero()) {
      thread_local_top()->external_caught_exception_ = false;
      clear_scheduled_exception();
    }
2030
  }
2031
  if (reinterpret_cast<void*>(thread_local_top()->pending_message_obj_.ptr()) ==
2032
      handler->message_obj_) {
2033 2034
    clear_pending_message();
  }
2035 2036
}

2037 2038
Object Isolate::PromoteScheduledException() {
  Object thrown = scheduled_exception();
2039 2040 2041 2042 2043 2044
  clear_scheduled_exception();
  // Re-throw the exception to avoid getting repeated error reporting.
  return ReThrow(thrown);
}

// Captures the current stack trace, serializes every frame to text, and
// writes the resulting string to {out}.
void Isolate::PrintCurrentStackTrace(FILE* out) {
  // Capture an unlimited, unfiltered trace, including builtin exit frames.
  CaptureStackTraceOptions options;
  options.limit = 0;
  options.skip_mode = SKIP_NONE;
  options.capture_builtin_exit_frames = true;
  options.async_stack_trace = FLAG_async_stack_traces;
  options.filter_mode = FrameArrayBuilder::CURRENT_SECURITY_CONTEXT;
  options.capture_only_frames_subject_to_debugging = false;
  options.enable_frame_caching = false;

  Handle<FixedArray> frame_array = Handle<FixedArray>::cast(
      CaptureStackTrace(this, this->factory()->undefined_value(), options));

  // Serialize the captured frames one by one into a single string.
  IncrementalStringBuilder serializer(this);
  const int frame_count = frame_array->length();
  for (int index = 0; index < frame_count; ++index) {
    Handle<StackTraceFrame> trace_frame(
        StackTraceFrame::cast(frame_array->get(index)), this);
    SerializeStackTraceFrame(this, trace_frame, serializer);
  }

  Handle<String> result = serializer.Finish().ToHandleChecked();
  result->PrintOn(out);
}

2068
bool Isolate::ComputeLocation(MessageLocation* target) {
2069
  StackTraceFrameIterator it(this);
2070 2071 2072 2073 2074
  if (it.done()) return false;
  StandardFrame* frame = it.frame();
  // Compute the location from the function and the relocation info of the
  // baseline code. For optimized code this will use the deoptimization
  // information to get canonical location information.
2075
  std::vector<FrameSummary> frames;
2076
  wasm::WasmCodeRefScope code_ref_scope;
2077
  frame->Summarize(&frames);
2078
  FrameSummary& summary = frames.back();
2079
  Handle<SharedFunctionInfo> shared;
2080 2081
  Handle<Object> script = summary.script();
  if (!script->IsScript() ||
2082
      (Script::cast(*script).source().IsUndefined(this))) {
2083 2084 2085
    return false;
  }

2086
  if (summary.IsJavaScript()) {
2087
    shared = handle(summary.AsJavaScript().function()->shared(), this);
2088
  }
2089 2090 2091 2092 2093 2094 2095 2096
  if (summary.AreSourcePositionsAvailable()) {
    int pos = summary.SourcePosition();
    *target =
        MessageLocation(Handle<Script>::cast(script), pos, pos + 1, shared);
  } else {
    *target = MessageLocation(Handle<Script>::cast(script), shared,
                              summary.code_offset());
  }
2097
  return true;
2098 2099
}

2100 2101 2102 2103 2104
bool Isolate::ComputeLocationFromException(MessageLocation* target,
                                           Handle<Object> exception) {
  if (!exception->IsJSObject()) return false;

  Handle<Name> start_pos_symbol = factory()->error_start_pos_symbol();
2105
  Handle<Object> start_pos = JSReceiver::GetDataProperty(
2106 2107 2108 2109 2110
      Handle<JSObject>::cast(exception), start_pos_symbol);
  if (!start_pos->IsSmi()) return false;
  int start_pos_value = Handle<Smi>::cast(start_pos)->value();

  Handle<Name> end_pos_symbol = factory()->error_end_pos_symbol();
2111
  Handle<Object> end_pos = JSReceiver::GetDataProperty(
2112 2113 2114 2115 2116
      Handle<JSObject>::cast(exception), end_pos_symbol);
  if (!end_pos->IsSmi()) return false;
  int end_pos_value = Handle<Smi>::cast(end_pos)->value();

  Handle<Name> script_symbol = factory()->error_script_symbol();
2117
  Handle<Object> script = JSReceiver::GetDataProperty(
2118 2119 2120
      Handle<JSObject>::cast(exception), script_symbol);
  if (!script->IsScript()) return false;

2121
  Handle<Script> cast_script(Script::cast(*script), this);
2122 2123 2124 2125
  *target = MessageLocation(cast_script, start_pos_value, end_pos_value);
  return true;
}

// Attempts to compute a message location from the simple stack trace stored
// on {exception} under the stack-trace symbol. Handles both Wasm / asm.js
// frames and JavaScript frames, skipping frames not subject to debugging.
// Returns true and fills {target} on success.
bool Isolate::ComputeLocationFromStackTrace(MessageLocation* target,
                                            Handle<Object> exception) {
  if (!exception->IsJSObject()) return false;
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> property =
      JSReceiver::GetDataProperty(Handle<JSObject>::cast(exception), key);
  if (!property->IsFixedArray()) return false;

  Handle<FrameArray> elements =
      GetFrameArrayFromStackTrace(this, Handle<FixedArray>::cast(property));

  const int frame_count = elements->FrameCount();
  for (int i = 0; i < frame_count; i++) {
    if (elements->IsWasmFrame(i) || elements->IsAsmJsWasmFrame(i)) {
      // Wasm / asm.js frame: map the code offset back to a position in the
      // module's script.
      Handle<WasmInstanceObject> instance(elements->WasmInstance(i), this);
      uint32_t func_index =
          static_cast<uint32_t>(elements->WasmFunctionIndex(i).value());
      int code_offset = elements->Offset(i).value();
      bool is_at_number_conversion =
          elements->IsAsmJsWasmFrame(i) &&
          elements->Flags(i).value() & FrameArray::kAsmJsAtNumberConversion;
      // WasmCode* held alive by the {GlobalWasmCodeRef}.
      wasm::WasmCode* code =
          Managed<wasm::GlobalWasmCodeRef>::cast(elements->WasmCodeObject(i))
              .get()
              ->code();
      int byte_offset =
          FrameSummary::WasmCompiledFrameSummary::GetWasmSourcePosition(
              code, code_offset);
      int pos = WasmModuleObject::GetSourcePosition(
          handle(instance->module_object(), this), func_index, byte_offset,
          is_at_number_conversion);
      Handle<Script> script(instance->module_object().script(), this);

      *target = MessageLocation(script, pos, pos + 1);
      return true;
    }

    // JavaScript frame: only frames subject to debugging with a sourced
    // script can produce a location.
    Handle<JSFunction> fun = handle(elements->Function(i), this);
    if (!fun->shared().IsSubjectToDebugging()) continue;

    Object script = fun->shared().script();
    if (script.IsScript() &&
        !(Script::cast(script).source().IsUndefined(this))) {
      Handle<SharedFunctionInfo> shared = handle(fun->shared(), this);

      AbstractCode abstract_code = elements->Code(i);
      const int code_offset = elements->Offset(i).value();
      Handle<Script> casted_script(Script::cast(script), this);
      if (shared->HasBytecodeArray() &&
          shared->GetBytecodeArray().HasSourcePositionTable()) {
        int pos = abstract_code.SourcePosition(code_offset);
        *target = MessageLocation(casted_script, pos, pos + 1, shared);
      } else {
        *target = MessageLocation(casted_script, shared, code_offset);
      }

      return true;
    }
  }
  return false;
}

2189 2190
Handle<JSMessageObject> Isolate::CreateMessage(Handle<Object> exception,
                                               MessageLocation* location) {
2191
  Handle<FixedArray> stack_trace_object;
2192
  if (capture_stack_trace_for_uncaught_exceptions_) {
2193
    if (exception->IsJSError()) {
2194
      // We fetch the stack trace that corresponds to this error object.
2195 2196 2197 2198 2199
      // If the lookup fails, the exception is probably not a valid Error
      // object. In that case, we fall through and capture the stack trace
      // at this throw site.
      stack_trace_object =
          GetDetailedStackTrace(Handle<JSObject>::cast(exception));
2200 2201
    }
    if (stack_trace_object.is_null()) {
2202
      // Not an error object, we capture stack and location at throw site.
2203 2204 2205 2206 2207
      stack_trace_object = CaptureCurrentStackTrace(
          stack_trace_for_uncaught_exceptions_frame_limit_,
          stack_trace_for_uncaught_exceptions_options_);
    }
  }
2208
  MessageLocation computed_location;
2209
  if (location == nullptr &&
2210 2211 2212 2213
      (ComputeLocationFromException(&computed_location, exception) ||
       ComputeLocationFromStackTrace(&computed_location, exception) ||
       ComputeLocation(&computed_location))) {
    location = &computed_location;
2214
  }
2215

2216 2217 2218
  return MessageHandler::MakeMessageObject(
      this, MessageTemplate::kUncaughtException, location, exception,
      stack_trace_object);
2219 2220
}

2221
// Returns true if the top-most handler that would see |exception| is a
// JavaScript JS_ENTRY handler rather than an external v8::TryCatch.
bool Isolate::IsJavaScriptHandlerOnTop(Object exception) {
  DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);

  // For uncatchable exceptions, the JavaScript handler cannot be on top.
  if (!is_catchable_by_javascript(exception)) return false;

  // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
  Address entry_handler = Isolate::handler(thread_local_top());
  if (entry_handler == kNullAddress) return false;

  // Get the address of the external handler so we can compare the address to
  // determine which one is closer to the top of the stack.
  Address external_handler = thread_local_top()->try_catch_handler_address();
  if (external_handler == kNullAddress) return true;

  // The exception has been externally caught if and only if there is an
  // external handler which is on top of the top-most JS_ENTRY handler.
  //
  // Note, that finally clauses would re-throw an exception unless it's aborted
  // by jumps in control flow (like return, break, etc.) and we'll have another
  // chance to set proper v8::TryCatch later.
  return (entry_handler < external_handler);
}
2244

2245
// Returns true if the top-most handler that would see |exception| is an
// external v8::TryCatch rather than a JavaScript JS_ENTRY handler.
bool Isolate::IsExternalHandlerOnTop(Object exception) {
  DCHECK_NE(ReadOnlyRoots(heap()).the_hole_value(), exception);

  // Get the address of the external handler so we can compare the address to
  // determine which one is closer to the top of the stack.
  Address external_handler = thread_local_top()->try_catch_handler_address();
  if (external_handler == kNullAddress) return false;

  // For uncatchable exceptions, the external handler is always on top.
  if (!is_catchable_by_javascript(exception)) return true;

  // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
  Address entry_handler = Isolate::handler(thread_local_top());
  if (entry_handler == kNullAddress) return true;

  // The exception has been externally caught if and only if there is an
  // external handler which is on top of the top-most JS_ENTRY handler.
  //
  // Note, that finally clauses would re-throw an exception unless it's aborted
  // by jumps in control flow (like return, break, etc.) and we'll have another
  // chance to set proper v8::TryCatch later.
  return (entry_handler > external_handler);
}

2269
void Isolate::ReportPendingMessagesImpl(bool report_externally) {
2270
  Object exception_obj = pending_exception();
2271

2272
  // Clear the pending message object early to avoid endless recursion.
2273
  Object message_obj = thread_local_top()->pending_message_obj_;
2274 2275 2276 2277
  clear_pending_message();

  // For uncatchable exceptions we do nothing. If needed, the exception and the
  // message have already been propagated to v8::TryCatch.
2278
  if (!is_catchable_by_javascript(exception_obj)) return;
2279

2280
  // Determine whether the message needs to be reported to all message handlers
2281 2282
  // depending on whether and external v8::TryCatch or an internal JavaScript
  // handler is on top.
2283
  bool should_report_exception;
2284
  if (report_externally) {
2285 2286
    // Only report the exception if the external handler is verbose.
    should_report_exception = try_catch_handler()->is_verbose_;
2287
  } else {
2288
    // Report the exception if it isn't caught by JavaScript code.
2289
    should_report_exception = !IsJavaScriptHandlerOnTop(exception_obj);
2290 2291 2292
  }

  // Actually report the pending message to all message handlers.
2293
  if (!message_obj.IsTheHole(this) && should_report_exception) {
2294
    HandleScope scope(this);
2295
    Handle<JSMessageObject> message(JSMessageObject::cast(message_obj), this);
2296
    Handle<Object> exception(exception_obj, this);
2297
    Handle<Script> script(message->script(), this);
2298 2299 2300 2301
    // Clear the exception and restore it afterwards, otherwise
    // CollectSourcePositions will abort.
    clear_pending_exception();
    JSMessageObject::EnsureSourcePositionsAvailable(this, message);
2302
    set_pending_exception(*exception);
2303 2304
    int start_pos = message->GetStartPosition();
    int end_pos = message->GetEndPosition();
2305 2306
    MessageLocation location(script, start_pos, end_pos);
    MessageHandler::ReportMessage(this, &location, message);
2307 2308 2309
  }
}

2310 2311 2312 2313 2314 2315
void Isolate::ReportPendingMessages() {
  DCHECK(AllowExceptions::IsAllowed(this));

  // The embedder might run script in response to an exception.
  AllowJavascriptExecutionDebugOnly allow_script(this);

2316
  Object exception = pending_exception();
2317 2318 2319 2320 2321 2322 2323 2324 2325 2326 2327 2328 2329 2330 2331 2332 2333 2334

  // Try to propagate the exception to an external v8::TryCatch handler. If
  // propagation was unsuccessful, then we will get another chance at reporting
  // the pending message if the exception is re-thrown.
  bool has_been_propagated = PropagatePendingExceptionToExternalTryCatch();
  if (!has_been_propagated) return;

  ReportPendingMessagesImpl(IsExternalHandlerOnTop(exception));
}

void Isolate::ReportPendingMessagesFromJavaScript() {
  DCHECK(AllowExceptions::IsAllowed(this));

  auto IsHandledByJavaScript = [=]() {
    // In this situation, the exception is always a non-terminating exception.

    // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
    Address entry_handler = Isolate::handler(thread_local_top());
2335
    DCHECK_NE(entry_handler, kNullAddress);
2336
    entry_handler = StackHandler::FromAddress(entry_handler)->next_address();
2337 2338 2339 2340

    // Get the address of the external handler so we can compare the address to
    // determine which one is closer to the top of the stack.
    Address external_handler = thread_local_top()->try_catch_handler_address();
2341
    if (external_handler == kNullAddress) return true;
2342 2343 2344 2345 2346 2347

    return (entry_handler < external_handler);
  };

  auto IsHandledExternally = [=]() {
    Address external_handler = thread_local_top()->try_catch_handler_address();
2348
    if (external_handler == kNullAddress) return false;
2349 2350 2351

    // Get the top-most JS_ENTRY handler, cannot be on top if it doesn't exist.
    Address entry_handler = Isolate::handler(thread_local_top());
2352
    DCHECK_NE(entry_handler, kNullAddress);
2353
    entry_handler = StackHandler::FromAddress(entry_handler)->next_address();
2354 2355 2356 2357 2358
    return (entry_handler > external_handler);
  };

  auto PropagateToExternalHandler = [=]() {
    if (IsHandledByJavaScript()) {
2359
      thread_local_top()->external_caught_exception_ = false;
2360 2361 2362 2363
      return false;
    }

    if (!IsHandledExternally()) {
2364
      thread_local_top()->external_caught_exception_ = false;
2365 2366 2367
      return true;
    }

2368
    thread_local_top()->external_caught_exception_ = true;
2369
    v8::TryCatch* handler = try_catch_handler();
2370 2371
    DCHECK(thread_local_top()->pending_message_obj_.IsJSMessageObject() ||
           thread_local_top()->pending_message_obj_.IsTheHole(this));
2372 2373
    handler->can_continue_ = true;
    handler->has_terminated_ = false;
2374
    handler->exception_ = reinterpret_cast<void*>(pending_exception().ptr());
2375
    // Propagate to the external try-catch only if we got an actual message.
2376
    if (thread_local_top()->pending_message_obj_.IsTheHole(this)) return true;
2377

2378
    handler->message_obj_ =
2379
        reinterpret_cast<void*>(thread_local_top()->pending_message_obj_.ptr());
2380 2381 2382 2383 2384 2385 2386 2387
    return true;
  };

  // Try to propagate to an external v8::TryCatch handler.
  if (!PropagateToExternalHandler()) return;

  ReportPendingMessagesImpl(true);
}
2388

2389
bool Isolate::OptionalRescheduleException(bool clear_exception) {
2390
  DCHECK(has_pending_exception());
2391
  PropagatePendingExceptionToExternalTryCatch();
2392

2393
  bool is_termination_exception =
2394
      pending_exception() == ReadOnlyRoots(this).termination_exception();
2395

2396
  if (is_termination_exception) {
2397
    if (clear_exception) {
2398 2399 2400 2401
      thread_local_top()->external_caught_exception_ = false;
      clear_pending_exception();
      return false;
    }
2402 2403 2404 2405
  } else if (thread_local_top()->external_caught_exception_) {
    // If the exception is externally caught, clear it if there are no
    // JavaScript frames on the way to the C++ frame that has the
    // external handler.
2406
    DCHECK_NE(thread_local_top()->try_catch_handler_address(), kNullAddress);
2407 2408 2409 2410 2411 2412 2413 2414 2415 2416 2417 2418 2419
    Address external_handler_address =
        thread_local_top()->try_catch_handler_address();
    JavaScriptFrameIterator it(this);
    if (it.done() || (it.frame()->sp() > external_handler_address)) {
      clear_exception = true;
    }
  }

  // Clear the exception if needed.
  if (clear_exception) {
    thread_local_top()->external_caught_exception_ = false;
    clear_pending_exception();
    return false;
2420 2421 2422 2423 2424 2425 2426 2427
  }

  // Reschedule the exception.
  thread_local_top()->scheduled_exception_ = pending_exception();
  clear_pending_exception();
  return true;
}

2428
// Pushes |promise| onto the thread-local promise-on-stack chain, retaining it
// via a global handle so it survives handle-scope teardown.
void Isolate::PushPromise(Handle<JSObject> promise) {
  ThreadLocalTop* tltop = thread_local_top();
  PromiseOnStack* prev = tltop->promise_on_stack_;
  Handle<JSObject> global_promise = global_handles()->Create(*promise);
  tltop->promise_on_stack_ = new PromiseOnStack(global_promise, prev);
}

void Isolate::PopPromise() {
  ThreadLocalTop* tltop = thread_local_top();
2437
  if (tltop->promise_on_stack_ == nullptr) return;
2438
  PromiseOnStack* prev = tltop->promise_on_stack_->prev();
2439
  Handle<Object> global_promise = tltop->promise_on_stack_->promise();
2440 2441
  delete tltop->promise_on_stack_;
  tltop->promise_on_stack_ = prev;
2442
  global_handles()->Destroy(global_promise.location());
2443 2444
}

2445 2446 2447 2448 2449
namespace {
bool InternalPromiseHasUserDefinedRejectHandler(Isolate* isolate,
                                                Handle<JSPromise> promise);

bool PromiseHandlerCheck(Isolate* isolate, Handle<JSReceiver> handler,
2450
                         Handle<JSReceiver> deferred_promise) {
2451 2452 2453 2454 2455 2456
  // Recurse to the forwarding Promise, if any. This may be due to
  //  - await reaction forwarding to the throwaway Promise, which has
  //    a dependency edge to the outer Promise.
  //  - PromiseIdResolveHandler forwarding to the output of .then
  //  - Promise.all/Promise.race forwarding to a throwaway Promise, which
  //    has a dependency edge to the generated outer Promise.
2457
  // Otherwise, this is a real reject handler for the Promise.
2458 2459 2460 2461 2462 2463
  Handle<Symbol> key = isolate->factory()->promise_forwarding_handler_symbol();
  Handle<Object> forwarding_handler = JSReceiver::GetDataProperty(handler, key);
  if (forwarding_handler->IsUndefined(isolate)) {
    return true;
  }

2464
  if (!deferred_promise->IsJSPromise()) {
2465 2466 2467 2468
    return true;
  }

  return InternalPromiseHasUserDefinedRejectHandler(
2469
      isolate, Handle<JSPromise>::cast(deferred_promise));
2470 2471 2472 2473 2474 2475 2476 2477 2478 2479 2480 2481 2482 2483 2484 2485 2486 2487 2488 2489 2490
}

bool InternalPromiseHasUserDefinedRejectHandler(Isolate* isolate,
                                                Handle<JSPromise> promise) {
  // If this promise was marked as being handled by a catch block
  // in an async function, then it has a user-defined reject handler.
  if (promise->handled_hint()) return true;

  // If this Promise is subsumed by another Promise (a Promise resolved
  // with another Promise, or an intermediate, hidden, throwaway Promise
  // within async/await), then recurse on the outer Promise.
  // In this case, the dependency is one possible way that the Promise
  // could be resolved, so it does not subsume the other following cases.
  Handle<Symbol> key = isolate->factory()->promise_handled_by_symbol();
  Handle<Object> outer_promise_obj = JSObject::GetDataProperty(promise, key);
  if (outer_promise_obj->IsJSPromise() &&
      InternalPromiseHasUserDefinedRejectHandler(
          isolate, Handle<JSPromise>::cast(outer_promise_obj))) {
    return true;
  }

2491
  if (promise->status() == Promise::kPending) {
2492 2493 2494 2495 2496
    for (Handle<Object> current(promise->reactions(), isolate);
         !current->IsSmi();) {
      Handle<PromiseReaction> reaction = Handle<PromiseReaction>::cast(current);
      Handle<HeapObject> promise_or_capability(
          reaction->promise_or_capability(), isolate);
2497 2498 2499 2500 2501 2502 2503
      if (!promise_or_capability->IsUndefined(isolate)) {
        Handle<JSPromise> promise = Handle<JSPromise>::cast(
            promise_or_capability->IsJSPromise()
                ? promise_or_capability
                : handle(Handle<PromiseCapability>::cast(promise_or_capability)
                             ->promise(),
                         isolate));
2504
        if (reaction->reject_handler().IsUndefined(isolate)) {
2505 2506 2507 2508 2509 2510 2511 2512 2513
          if (InternalPromiseHasUserDefinedRejectHandler(isolate, promise)) {
            return true;
          }
        } else {
          Handle<JSReceiver> current_handler(
              JSReceiver::cast(reaction->reject_handler()), isolate);
          if (PromiseHandlerCheck(isolate, current_handler, promise)) {
            return true;
          }
2514
        }
2515
      }
2516
      current = handle(reaction->next(), isolate);
2517 2518 2519
    }
  }

2520 2521
  return false;
}
2522

2523 2524 2525 2526 2527 2528 2529 2530
}  // namespace

// Public wrapper: only JSPromise instances can carry user-defined reject
// handlers, so anything else trivially answers false.
bool Isolate::PromiseHasUserDefinedRejectHandler(Handle<Object> promise) {
  return promise->IsJSPromise() &&
         InternalPromiseHasUserDefinedRejectHandler(
             this, Handle<JSPromise>::cast(promise));
}

2531 2532 2533
// Walks the stack at throw time and returns the promise (if any) whose
// rejection the current exception corresponds to, or undefined.
Handle<Object> Isolate::GetPromiseOnStackOnThrow() {
  Handle<Object> undefined = factory()->undefined_value();
  ThreadLocalTop* tltop = thread_local_top();
  if (tltop->promise_on_stack_ == nullptr) return undefined;
  // Find the top-most try-catch or try-finally handler.
  CatchType prediction = PredictExceptionCatcher();
  if (prediction == NOT_CAUGHT || prediction == CAUGHT_BY_EXTERNAL) {
    return undefined;
  }
  Handle<Object> retval = undefined;
  PromiseOnStack* promise_on_stack = tltop->promise_on_stack_;
  for (StackFrameIterator it(this); !it.done(); it.Advance()) {
    StackFrame* frame = it.frame();
    HandlerTable::CatchPrediction catch_prediction;
    if (frame->is_java_script()) {
      catch_prediction = PredictException(JavaScriptFrame::cast(frame));
    } else if (frame->type() == StackFrame::STUB) {
      Code code = frame->LookupCode();
      if (!code.IsCode() || code.kind() != Code::BUILTIN ||
          !code.has_handler_table() || !code.is_turbofanned()) {
        continue;
      }
      catch_prediction = code.GetBuiltinCatchPrediction();
    } else {
      continue;
    }

    switch (catch_prediction) {
      case HandlerTable::UNCAUGHT:
        continue;
      case HandlerTable::CAUGHT:
      case HandlerTable::DESUGARING:
        if (retval->IsJSPromise()) {
          // Caught the result of an inner async/await invocation.
          // Mark the inner promise as caught in the "synchronous case" so
          // that Debug::OnException will see. In the synchronous case,
          // namely in the code in an async function before the first
          // await, the function which has this exception event has not yet
          // returned, so the generated Promise has not yet been marked
          // by AsyncFunctionAwaitCaught with promiseHandledHintSymbol.
          Handle<JSPromise>::cast(retval)->set_handled_hint(true);
        }
        return retval;
      case HandlerTable::PROMISE:
        return promise_on_stack
                   ? Handle<Object>::cast(promise_on_stack->promise())
                   : undefined;
      case HandlerTable::ASYNC_AWAIT: {
        // If in the initial portion of async/await, continue the loop to pop up
        // successive async/await stack frames until an asynchronous one with
        // dependents is found, or a non-async stack frame is encountered, in
        // order to handle the synchronous async/await catch prediction case:
        // assume that async function calls are awaited.
        if (!promise_on_stack) return retval;
        retval = promise_on_stack->promise();
        if (PromiseHasUserDefinedRejectHandler(retval)) {
          return retval;
        }
        promise_on_stack = promise_on_stack->prev();
        continue;
      }
    }
  }
  return retval;
}

2597
void Isolate::SetCaptureStackTraceForUncaughtExceptions(
2598
    bool capture, int frame_limit, StackTrace::StackTraceOptions options) {
2599 2600 2601 2602 2603
  capture_stack_trace_for_uncaught_exceptions_ = capture;
  stack_trace_for_uncaught_exceptions_frame_limit_ = frame_limit;
  stack_trace_for_uncaught_exceptions_options_ = options;
}

2604 2605 2606 2607 2608
// Installs the embedder callback consulted when deciding whether an
// uncaught exception should abort the process.
void Isolate::SetAbortOnUncaughtExceptionCallback(
    v8::Isolate::AbortOnUncaughtExceptionCallback callback) {
  abort_on_uncaught_exception_callback_ = callback;
}

2609 2610 2611 2612 2613 2614 2615 2616
// Asks the embedder whether Wasm threads are enabled for |context|; falls
// back to the --experimental-wasm-threads flag when no callback is set.
bool Isolate::AreWasmThreadsEnabled(Handle<Context> context) {
  auto callback = wasm_threads_enabled_callback();
  if (!callback) return FLAG_experimental_wasm_threads;
  v8::Local<v8::Context> api_context = v8::Utils::ToLocal(context);
  return callback(api_context);
}

2617 2618 2619 2620 2621
// Computes the incumbent context per the HTML spec's incumbent-realm rules:
// the newest author-function frame, else the last BackupIncumbentScope, else
// the entered/microtask context.
Handle<Context> Isolate::GetIncumbentContext() {
  JavaScriptFrameIterator it(this);

  // 1st candidate: most-recently-entered author function's context
  // if it's newer than the last Context::BackupIncumbentScope entry.
  //
  // NOTE: This code assumes that the stack grows downward.
  Address top_backup_incumbent =
      top_backup_incumbent_scope()
          ? top_backup_incumbent_scope()->JSStackComparableAddress()
          : 0;
  if (!it.done() &&
      (!top_backup_incumbent || it.frame()->sp() < top_backup_incumbent)) {
    Context context = Context::cast(it.frame()->context());
    return Handle<Context>(context.native_context(), this);
  }

  // 2nd candidate: the last Context::Scope's incumbent context if any.
  if (top_backup_incumbent_scope()) {
    return Utils::OpenHandle(
        *top_backup_incumbent_scope()->backup_incumbent_context_);
  }

  // Last candidate: the entered context or microtask context.
  // Given that there is no other author function is running, there must be
  // no cross-context function running, then the incumbent realm must match
  // the entry realm.
  v8::Local<v8::Context> entered_context =
      reinterpret_cast<v8::Isolate*>(this)->GetEnteredOrMicrotaskContext();
  return Utils::OpenHandle(*entered_context);
}
2648 2649

char* Isolate::ArchiveThread(char* to) {
2650 2651
  MemCopy(to, reinterpret_cast<char*>(thread_local_top()),
          sizeof(ThreadLocalTop));
2652
  InitializeThreadLocal();
2653 2654 2655
  clear_pending_exception();
  clear_pending_message();
  clear_scheduled_exception();
2656 2657 2658 2659
  return to + sizeof(ThreadLocalTop);
}

char* Isolate::RestoreThread(char* from) {
2660 2661 2662 2663
  MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
          sizeof(ThreadLocalTop));
// This might be just paranoia, but it seems to be needed in case a
// thread_local_top_ is restored on a separate OS thread.
2664 2665 2666
#ifdef USE_SIMULATOR
  thread_local_top()->simulator_ = Simulator::current(this);
#endif
2667
  DCHECK(context().is_null() || context().IsContext());
2668 2669 2670
  return from + sizeof(ThreadLocalTop);
}

2671
void Isolate::ReleaseSharedPtrs() {
2672
  base::MutexGuard lock(&managed_ptr_destructors_mutex_);
2673 2674 2675 2676 2677 2678 2679 2680 2681
  while (managed_ptr_destructors_head_) {
    ManagedPtrDestructor* l = managed_ptr_destructors_head_;
    ManagedPtrDestructor* n = nullptr;
    managed_ptr_destructors_head_ = nullptr;
    for (; l != nullptr; l = n) {
      l->destructor_(l->shared_ptr_ptr_);
      n = l->next_;
      delete l;
    }
2682 2683 2684
  }
}

2685
// Prepends |destructor| to the doubly-linked list of managed-pointer
// destructors, under the list mutex.
void Isolate::RegisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
  base::MutexGuard lock(&managed_ptr_destructors_mutex_);
  DCHECK_NULL(destructor->prev_);
  DCHECK_NULL(destructor->next_);
  if (managed_ptr_destructors_head_) {
    managed_ptr_destructors_head_->prev_ = destructor;
  }
  destructor->next_ = managed_ptr_destructors_head_;
  managed_ptr_destructors_head_ = destructor;
}

2696
// Unlinks |destructor| from the doubly-linked destructor list, under the
// list mutex. Does not delete it.
void Isolate::UnregisterManagedPtrDestructor(ManagedPtrDestructor* destructor) {
  base::MutexGuard lock(&managed_ptr_destructors_mutex_);
  if (destructor->prev_) {
    destructor->prev_->next_ = destructor->next_;
  } else {
    DCHECK_EQ(destructor, managed_ptr_destructors_head_);
    managed_ptr_destructors_head_ = destructor->next_;
  }
  if (destructor->next_) destructor->next_->prev_ = destructor->prev_;
  destructor->prev_ = nullptr;
  destructor->next_ = nullptr;
}
2708

2709 2710 2711 2712 2713 2714
// Adopts the (shared) Wasm engine and registers this isolate with it.
void Isolate::SetWasmEngine(std::shared_ptr<wasm::WasmEngine> engine) {
  // May be called exactly once, before {Init}.
  DCHECK_NULL(wasm_engine_);
  wasm_engine_ = std::move(engine);
  wasm_engine_->AddIsolate(this);
}

2715
// NOLINTNEXTLINE
2716 2717 2718 2719 2720 2721
Isolate::PerIsolateThreadData::~PerIsolateThreadData() {
#if defined(USE_SIMULATOR)
  delete simulator_;
#endif
}

2722 2723 2724 2725 2726
// Returns the per-thread data registered for |thread_id|, or nullptr.
Isolate::PerIsolateThreadData* Isolate::ThreadDataTable::Lookup(
    ThreadId thread_id) {
  auto t = table_.find(thread_id);
  if (t == table_.end()) return nullptr;
  return t->second;
}

// Registers |data| for its thread id; a duplicate registration is a bug.
void Isolate::ThreadDataTable::Insert(Isolate::PerIsolateThreadData* data) {
  bool inserted = table_.insert(std::make_pair(data->thread_id_, data)).second;
  CHECK(inserted);
}

// Unregisters and deletes the per-thread data for |data|'s thread id.
void Isolate::ThreadDataTable::Remove(PerIsolateThreadData* data) {
  table_.erase(data->thread_id_);
  delete data;
}

2739 2740 2741
// Deletes every registered per-thread data object and empties the table.
void Isolate::ThreadDataTable::RemoveAllThreads() {
  for (auto& x : table_) {
    delete x.second;
  }
  table_.clear();
}

2746
class VerboseAccountingAllocator : public AccountingAllocator {
2747
 public:
2748 2749 2750 2751 2752 2753 2754 2755 2756 2757 2758
  VerboseAccountingAllocator(Heap* heap, size_t allocation_sample_bytes)
      : heap_(heap), allocation_sample_bytes_(allocation_sample_bytes) {}

  v8::internal::Segment* AllocateSegment(size_t size) override {
    v8::internal::Segment* memory = AccountingAllocator::AllocateSegment(size);
    if (!memory) return nullptr;
    size_t malloced_current = GetCurrentMemoryUsage();

    if (last_memory_usage_ + allocation_sample_bytes_ < malloced_current) {
      PrintMemoryJSON(malloced_current);
      last_memory_usage_ = malloced_current;
2759 2760 2761 2762
    }
    return memory;
  }

2763 2764
  void ReturnSegment(v8::internal::Segment* memory) override {
    AccountingAllocator::ReturnSegment(memory);
2765 2766
    size_t malloced_current = GetCurrentMemoryUsage();

2767 2768
    if (malloced_current + allocation_sample_bytes_ < last_memory_usage_) {
      PrintMemoryJSON(malloced_current);
2769
      last_memory_usage_ = malloced_current;
2770 2771 2772
    }
  }

heimbuef's avatar
heimbuef committed
2773
  void ZoneCreation(const Zone* zone) override {
2774
    PrintZoneModificationSample(zone, "zonecreation");
2775
    nesting_deepth_++;
heimbuef's avatar
heimbuef committed
2776 2777 2778
  }

  void ZoneDestruction(const Zone* zone) override {
2779
    nesting_deepth_--;
2780 2781 2782 2783 2784
    PrintZoneModificationSample(zone, "zonedestruction");
  }

 private:
  void PrintZoneModificationSample(const Zone* zone, const char* type) {
heimbuef's avatar
heimbuef committed
2785 2786
    PrintF(
        "{"
2787
        "\"type\": \"%s\", "
heimbuef's avatar
heimbuef committed
2788 2789 2790 2791
        "\"isolate\": \"%p\", "
        "\"time\": %f, "
        "\"ptr\": \"%p\", "
        "\"name\": \"%s\", "
2792
        "\"size\": %zu,"
Hannes Payer's avatar
Hannes Payer committed
2793
        "\"nesting\": %zu}\n",
2794 2795
        type, reinterpret_cast<void*>(heap_->isolate()),
        heap_->isolate()->time_millis_since_init(),
heimbuef's avatar
heimbuef committed
2796
        reinterpret_cast<const void*>(zone), zone->name(),
2797
        zone->allocation_size(), nesting_deepth_.load());
heimbuef's avatar
heimbuef committed
2798 2799
  }

2800
  void PrintMemoryJSON(size_t malloced) {
2801 2802 2803 2804 2805
    // Note: Neither isolate, nor heap is locked, so be careful with accesses
    // as the allocator is potentially used on a concurrent thread.
    double time = heap_->isolate()->time_millis_since_init();
    PrintF(
        "{"
2806
        "\"type\": \"zone\", "
2807 2808
        "\"isolate\": \"%p\", "
        "\"time\": %f, "
2809
        "\"allocated\": %zu}\n",
2810
        reinterpret_cast<void*>(heap_->isolate()), time, malloced);
2811 2812 2813
  }

  Heap* heap_;
2814 2815 2816
  std::atomic<size_t> last_memory_usage_{0};
  std::atomic<size_t> nesting_deepth_{0};
  size_t allocation_sample_bytes_;
2817 2818
};

2819
#ifdef DEBUG
// Debug-only count of isolates created but not yet disposed.
std::atomic<size_t> Isolate::non_disposed_isolates_;
#endif  // DEBUG

2823
// static
2824 2825 2826 2827 2828 2829 2830 2831
Isolate* Isolate::New(IsolateAllocationMode mode) {
  // IsolateAllocator allocates the memory for the Isolate object according to
  // the given allocation mode.
  std::unique_ptr<IsolateAllocator> isolate_allocator =
      base::make_unique<IsolateAllocator>(mode);
  // Construct Isolate object in the allocated memory.
  void* isolate_ptr = isolate_allocator->isolate_memory();
  Isolate* isolate = new (isolate_ptr) Isolate(std::move(isolate_allocator));
2832
#if V8_TARGET_ARCH_64_BIT
2833 2834 2835
  DCHECK_IMPLIES(
      mode == IsolateAllocationMode::kInV8Heap,
      IsAligned(isolate->isolate_root(), kPtrComprIsolateRootAlignment));
2836
#endif
2837 2838 2839 2840 2841 2842 2843 2844 2845 2846 2847 2848 2849 2850 2851 2852

#ifdef DEBUG
  non_disposed_isolates_++;
#endif  // DEBUG

  return isolate;
}

// static
// Deinitializes and destroys |isolate|, then releases its backing memory.
// The previously-current isolate/thread data are restored afterwards.
void Isolate::Delete(Isolate* isolate) {
  DCHECK_NOT_NULL(isolate);
  // Temporarily set this isolate as current so that various parts of
  // the isolate can access it in their destructors without having a
  // direct pointer. We don't use Enter/Exit here to avoid
  // initializing the thread data.
  PerIsolateThreadData* saved_data = isolate->CurrentPerIsolateThreadData();
  DCHECK_EQ(true, isolate_key_created_.load(std::memory_order_relaxed));
  Isolate* saved_isolate = reinterpret_cast<Isolate*>(
      base::Thread::GetThreadLocal(isolate->isolate_key_));
  SetIsolateThreadLocals(isolate, nullptr);

  isolate->Deinit();

#ifdef DEBUG
  non_disposed_isolates_--;
#endif  // DEBUG

  // Take ownership of the IsolateAllocator to ensure the Isolate memory will
  // be available during Isolate destructor call.
  std::unique_ptr<IsolateAllocator> isolate_allocator =
      std::move(isolate->isolate_allocator_);
  isolate->~Isolate();
  // Now free the memory owned by the allocator.
  isolate_allocator.reset();

  // Restore the previous current isolate.
  SetIsolateThreadLocals(saved_isolate, saved_data);
}

2876 2877 2878 2879 2880 2881 2882
// Wires up the (possibly shared) read-only heap for this isolate; calling
// again with the same |ro_heap| is a no-op, with a different one a bug.
void Isolate::SetUpFromReadOnlyHeap(ReadOnlyHeap* ro_heap) {
  DCHECK_NOT_NULL(ro_heap);
  DCHECK_IMPLIES(read_only_heap_ != nullptr, read_only_heap_ == ro_heap);
  read_only_heap_ = ro_heap;
  heap_.SetUpFromReadOnlyHeap(ro_heap);
}

2883 2884 2885 2886 2887
// Exposes the page allocator owned by this isolate's IsolateAllocator.
v8::PageAllocator* Isolate::page_allocator() {
  return isolate_allocator_->page_allocator();
}

// Constructs an Isolate in the memory provided by |isolate_allocator|
// (whose ownership it assumes) and initializes early subsystems; full
// initialization happens later in Init().
Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator)
    : isolate_data_(this),
      isolate_allocator_(std::move(isolate_allocator)),
      id_(isolate_counter.fetch_add(1, std::memory_order_relaxed)),
      allocator_(FLAG_trace_zone_stats
                     ? new VerboseAccountingAllocator(&heap_, 256 * KB)
                     : new AccountingAllocator()),
      builtins_(this),
      rail_mode_(PERFORMANCE_ANIMATION),
      code_event_dispatcher_(new CodeEventDispatcher()),
      cancelable_task_manager_(new CancelableTaskManager()) {
  TRACE_ISOLATE(constructor);
  CheckIsolateLayout();

  // ThreadManager is initialized early to support locking an isolate
  // before it is entered.
  thread_manager_ = new ThreadManager(this);

  handle_scope_data_.Initialize();

#define ISOLATE_INIT_EXECUTE(type, name, initial_value) \
  name##_ = (initial_value);
  ISOLATE_INIT_LIST(ISOLATE_INIT_EXECUTE)
#undef ISOLATE_INIT_EXECUTE

#define ISOLATE_INIT_ARRAY_EXECUTE(type, name, length) \
  memset(name##_, 0, sizeof(type) * length);
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_INIT_ARRAY_EXECUTE)
#undef ISOLATE_INIT_ARRAY_EXECUTE

  InitializeLoggingAndCounters();
  debug_ = new Debug(this);

  InitializeDefaultEmbeddedBlob();

  MicrotaskQueue::SetUpDefaultMicrotaskQueue(this);
}

2925 2926 2927 2928 2929 2930 2931 2932 2933 2934 2935 2936 2937 2938 2939 2940 2941 2942
// Verifies (with CHECKs) that the Isolate object layout matches the field
// offsets that Internals (the embedder-visible API layer) hard-codes, so
// the API can read isolate fields without going through accessors.
void Isolate::CheckIsolateLayout() {
  CHECK_EQ(OFFSET_OF(Isolate, isolate_data_), 0);
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.embedder_data_)),
           Internals::kIsolateEmbedderDataOffset);
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_)),
           Internals::kIsolateRootsOffset);
  CHECK_EQ(Internals::kExternalMemoryOffset % 8, 0);
  CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.external_memory_)),
           Internals::kExternalMemoryOffset);
  CHECK_EQ(Internals::kExternalMemoryLimitOffset % 8, 0);
  CHECK_EQ(static_cast<int>(
               OFFSET_OF(Isolate, isolate_data_.external_memory_limit_)),
           Internals::kExternalMemoryLimitOffset);
  CHECK_EQ(Internals::kExternalMemoryAtLastMarkCompactOffset % 8, 0);
  CHECK_EQ(static_cast<int>(OFFSET_OF(
               Isolate, isolate_data_.external_memory_at_last_mark_compact_)),
           Internals::kExternalMemoryAtLastMarkCompactOffset);
}
2943

2944 2945
// Frees the external-reference map built for serialization.
void Isolate::ClearSerializerData() {
  delete external_reference_map_;
  external_reference_map_ = nullptr;
}

2949 2950 2951 2952 2953 2954 2955
bool Isolate::LogObjectRelocation() {
  return FLAG_verify_predictable || logger()->is_logging() || is_profiling() ||
         heap()->isolate()->logger()->is_listening_to_code_events() ||
         (heap_profiler() != nullptr &&
          heap_profiler()->is_tracking_object_moves()) ||
         heap()->has_heap_object_allocation_tracker();
}
2956

2957
void Isolate::Deinit() {
2958
  TRACE_ISOLATE(deinit);
2959

2960 2961 2962 2963 2964
  tracing_cpu_profiler_.reset();
  if (FLAG_stress_sampling_allocation_profiler > 0) {
    heap_profiler()->StopSamplingHeapProfiler();
  }

2965
#if defined(V8_OS_WIN64)
2966 2967 2968 2969 2970 2971 2972
  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
      heap()->memory_allocator()) {
    const base::AddressRegion& code_range =
        heap()->memory_allocator()->code_range();
    void* start = reinterpret_cast<void*>(code_range.begin());
    win64_unwindinfo::UnregisterNonABICompliantCodeRange(start);
  }
2973
#endif  // V8_OS_WIN64
2974

2975
  debug()->Unload();
2976

2977
  wasm_engine()->DeleteCompileJobsOnIsolate(this);
2978

2979
  if (concurrent_recompilation_enabled()) {
2980 2981
    optimizing_compile_dispatcher_->Stop();
    delete optimizing_compile_dispatcher_;
2982
    optimizing_compile_dispatcher_ = nullptr;
2983
  }
2984

2985
  wasm_engine()->memory_tracker()->DeleteSharedMemoryObjectsOnIsolate(this);
2986

2987
  heap_.mark_compact_collector()->EnsureSweepingCompleted();
2988
  heap_.memory_allocator()->unmapper()->EnsureUnmappingCompleted();
2989

2990
  DumpAndResetStats();
2991

2992 2993 2994
  if (FLAG_print_deopt_stress) {
    PrintF(stdout, "=== Stress deopt counter: %u\n", stress_deopt_count_);
  }
2995

2996
  // We must stop the logger before we tear down other components.
2997
  sampler::Sampler* sampler = logger_->sampler();
2998
  if (sampler && sampler->IsActive()) sampler->Stop();
2999

3000
  FreeThreadResources();
3001
  logger_->StopProfilerThread();
3002

3003 3004 3005 3006
  // We start with the heap tear down so that releasing managed objects does
  // not cause a GC.
  heap_.StartTearDown();

3007
  ReleaseSharedPtrs();
3008

3009
  delete deoptimizer_data_;
3010
  deoptimizer_data_ = nullptr;
3011 3012
  builtins_.TearDown();
  bootstrapper_->TearDown();
3013

3014
  if (runtime_profiler_ != nullptr) {
3015
    delete runtime_profiler_;
3016
    runtime_profiler_ = nullptr;
3017
  }
3018

3019
  delete heap_profiler_;
3020
  heap_profiler_ = nullptr;
3021

3022
  compiler_dispatcher_->AbortAll();
3023 3024 3025
  delete compiler_dispatcher_;
  compiler_dispatcher_ = nullptr;

3026
  // This stops cancelable tasks (i.e. concurrent marking tasks)
3027 3028
  cancelable_task_manager()->CancelAndWait();

3029 3030
  heap_.TearDown();
  logger_->TearDown();
3031

3032 3033 3034 3035
  if (wasm_engine_) {
    wasm_engine_->RemoveIsolate(this);
    wasm_engine_.reset();
  }
3036

3037
  TearDownEmbeddedBlob();
3038

3039
  delete interpreter_;
3040
  interpreter_ = nullptr;
3041

3042 3043 3044
  delete ast_string_constants_;
  ast_string_constants_ = nullptr;

3045 3046
  code_event_dispatcher_.reset();

3047
  delete root_index_map_;
3048
  root_index_map_ = nullptr;
3049

3050 3051 3052 3053
  delete compiler_zone_;
  compiler_zone_ = nullptr;
  compiler_cache_ = nullptr;

3054
  ClearSerializerData();
3055 3056 3057 3058 3059

  {
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    thread_data_table_.RemoveAllThreads();
  }
3060 3061 3062 3063
}

// Publishes |isolate| and its per-thread data in the calling thread's TLS
// slots, making them retrievable via Isolate::Current() and friends.
void Isolate::SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data) {
  base::Thread::SetThreadLocal(isolate_key_, isolate);
  base::Thread::SetThreadLocal(per_isolate_thread_data_key_, data);
}

// Destroys the isolate. Most subsystems were already shut down in Deinit();
// what remains here is freeing the heap-independent owned objects. Each
// pointer is nulled after deletion so stale reads fail fast.
Isolate::~Isolate() {
  TRACE_ISOLATE(destructor);

  // The entry stack must be empty when we get here.
  DCHECK(entry_stack_ == nullptr || entry_stack_->previous_item == nullptr);

  delete entry_stack_;
  entry_stack_ = nullptr;

  delete date_cache_;
  date_cache_ = nullptr;

  delete regexp_stack_;
  regexp_stack_ = nullptr;

  delete descriptor_lookup_cache_;
  descriptor_lookup_cache_ = nullptr;

  delete load_stub_cache_;
  load_stub_cache_ = nullptr;
  delete store_stub_cache_;
  store_stub_cache_ = nullptr;

  delete materialized_object_store_;
  materialized_object_store_ = nullptr;

  delete logger_;
  logger_ = nullptr;

  delete handle_scope_implementer_;
  handle_scope_implementer_ = nullptr;

  delete code_tracer();
  set_code_tracer(nullptr);

  delete compilation_cache_;
  compilation_cache_ = nullptr;
  delete bootstrapper_;
  bootstrapper_ = nullptr;
  delete inner_pointer_to_code_cache_;
  inner_pointer_to_code_cache_ = nullptr;

  delete thread_manager_;
  thread_manager_ = nullptr;

  delete global_handles_;
  global_handles_ = nullptr;
  delete eternal_handles_;
  eternal_handles_ = nullptr;

  delete string_stream_debug_object_cache_;
  string_stream_debug_object_cache_ = nullptr;

  delete random_number_generator_;
  random_number_generator_ = nullptr;

  delete fuzzer_rng_;
  fuzzer_rng_ = nullptr;

  delete debug_;
  debug_ = nullptr;

  delete cancelable_task_manager_;
  cancelable_task_manager_ = nullptr;

  delete allocator_;
  allocator_ = nullptr;

  // Assert that |default_microtask_queue_| is the last MicrotaskQueue instance.
  DCHECK_IMPLIES(default_microtask_queue_,
                 default_microtask_queue_ == default_microtask_queue_->next());
  delete default_microtask_queue_;
  default_microtask_queue_ = nullptr;
}

3143
// Initializes the calling thread's ThreadLocalTop state for this isolate.
void Isolate::InitializeThreadLocal() { thread_local_top()->Initialize(this); }
3144

3145 3146 3147 3148
void Isolate::SetTerminationOnExternalTryCatch() {
  if (try_catch_handler() == nullptr) return;
  try_catch_handler()->can_continue_ = false;
  try_catch_handler()->has_terminated_ = true;
3149 3150
  try_catch_handler()->exception_ =
      reinterpret_cast<void*>(ReadOnlyRoots(heap()).null_value().ptr());
3151
}
3152

3153
bool Isolate::PropagatePendingExceptionToExternalTryCatch() {
3154
  Object exception = pending_exception();
3155 3156

  if (IsJavaScriptHandlerOnTop(exception)) {
3157
    thread_local_top()->external_caught_exception_ = false;
3158 3159 3160 3161
    return false;
  }

  if (!IsExternalHandlerOnTop(exception)) {
3162
    thread_local_top()->external_caught_exception_ = false;
3163 3164 3165
    return true;
  }

3166
  thread_local_top()->external_caught_exception_ = true;
3167
  if (!is_catchable_by_javascript(exception)) {
3168
    SetTerminationOnExternalTryCatch();
3169 3170
  } else {
    v8::TryCatch* handler = try_catch_handler();
3171 3172
    DCHECK(thread_local_top()->pending_message_obj_.IsJSMessageObject() ||
           thread_local_top()->pending_message_obj_.IsTheHole(this));
3173 3174
    handler->can_continue_ = true;
    handler->has_terminated_ = false;
3175
    handler->exception_ = reinterpret_cast<void*>(pending_exception().ptr());
3176
    // Propagate to the external try-catch only if we got an actual message.
3177
    if (thread_local_top()->pending_message_obj_.IsTheHole(this)) return true;
3178

3179
    handler->message_obj_ =
3180
        reinterpret_cast<void*>(thread_local_top()->pending_message_obj_.ptr());
3181 3182 3183 3184
  }
  return true;
}

3185
// Lazily creates the Counters instance. Returns true only on the call that
// actually performed the initialization.
bool Isolate::InitializeCounters() {
  if (async_counters_) return false;
  async_counters_ = std::make_shared<Counters>(this);
  return true;
}
3190

3191
void Isolate::InitializeLoggingAndCounters() {
3192
  if (logger_ == nullptr) {
3193
    logger_ = new Logger(this);
3194
  }
3195
  InitializeCounters();
3196 3197
}

3198
namespace {
3199

3200
void CreateOffHeapTrampolines(Isolate* isolate) {
3201
  DCHECK_NOT_NULL(isolate->embedded_blob());
3202
  DCHECK_NE(0, isolate->embedded_blob_size());
3203

3204 3205 3206
  HandleScope scope(isolate);
  Builtins* builtins = isolate->builtins();

3207
  EmbeddedData d = EmbeddedData::FromBlob();
3208

3209
  for (int i = 0; i < Builtins::builtin_count; i++) {
3210
    if (!Builtins::IsIsolateIndependent(i)) continue;
3211

3212
    Address instruction_start = d.InstructionStartOfBuiltin(i);
3213
    Handle<Code> trampoline = isolate->factory()->NewOffHeapTrampolineFor(
3214
        builtins->builtin_handle(i), instruction_start);
3215

3216 3217
    // From this point onwards, the old builtin code object is unreachable and
    // will be collected by the next GC.
3218
    builtins->set_builtin(i, *trampoline);
3219 3220
  }
}
3221

3222 3223
#ifdef DEBUG
bool IsolateIsCompatibleWithEmbeddedBlob(Isolate* isolate) {
3224
  if (!FLAG_embedded_builtins) return true;
3225 3226 3227 3228 3229
  EmbeddedData d = EmbeddedData::FromBlob(isolate);
  return (d.IsolateHash() == isolate->HashIsolateForEmbeddedBlob());
}
#endif  // DEBUG

3230
}  // namespace
3231

3232 3233 3234 3235 3236 3237 3238 3239 3240 3241 3242 3243 3244 3245 3246 3247 3248 3249 3250 3251 3252 3253 3254 3255 3256 3257 3258 3259 3260 3261 3262
// Chooses the embedded-builtins blob this isolate starts with: the blob
// compiled into the binary, the trusted variant (multi-snapshot builds with
// untrusted-code mitigations disabled), or a previously published "sticky"
// blob, whose refcount is then bumped.
void Isolate::InitializeDefaultEmbeddedBlob() {
  const uint8_t* blob = DefaultEmbeddedBlob();
  uint32_t size = DefaultEmbeddedBlobSize();

#ifdef V8_MULTI_SNAPSHOTS
  if (!FLAG_untrusted_code_mitigations) {
    blob = TrustedEmbeddedBlob();
    size = TrustedEmbeddedBlobSize();
  }
#endif

  // Double-checked test: cheap unlocked read first, then re-check under the
  // refcount mutex before taking a reference on the sticky blob.
  if (StickyEmbeddedBlob() != nullptr) {
    base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
    // Check again now that we hold the lock.
    if (StickyEmbeddedBlob() != nullptr) {
      blob = StickyEmbeddedBlob();
      size = StickyEmbeddedBlobSize();
      current_embedded_blob_refs_++;
    }
  }

  if (blob == nullptr) {
    CHECK_EQ(0, size);
  } else {
    SetEmbeddedBlob(blob, size);
  }
}

void Isolate::CreateAndSetEmbeddedBlob() {
  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());

3263 3264
  PrepareBuiltinSourcePositionMap();

3265 3266 3267 3268 3269 3270 3271 3272 3273 3274 3275 3276 3277 3278 3279 3280 3281 3282
  // If a sticky blob has been set, we reuse it.
  if (StickyEmbeddedBlob() != nullptr) {
    CHECK_EQ(embedded_blob(), StickyEmbeddedBlob());
    CHECK_EQ(CurrentEmbeddedBlob(), StickyEmbeddedBlob());
  } else {
    // Create and set a new embedded blob.
    uint8_t* data;
    uint32_t size;
    InstructionStream::CreateOffHeapInstructionStream(this, &data, &size);

    CHECK_EQ(0, current_embedded_blob_refs_);
    const uint8_t* const_data = const_cast<const uint8_t*>(data);
    SetEmbeddedBlob(const_data, size);
    current_embedded_blob_refs_++;

    SetStickyEmbeddedBlob(const_data, size);
  }

3283 3284 3285
  CreateOffHeapTrampolines(this);
}

3286 3287 3288 3289 3290 3291 3292 3293 3294 3295 3296 3297 3298 3299 3300 3301 3302
// Drops this isolate's reference on the sticky embedded blob and frees the
// off-heap instruction stream when the last reference goes away (and
// refcounting is enabled).
void Isolate::TearDownEmbeddedBlob() {
  // Nothing to do in case the blob is embedded into the binary or unset.
  if (StickyEmbeddedBlob() == nullptr) return;

  CHECK_EQ(embedded_blob(), StickyEmbeddedBlob());
  CHECK_EQ(CurrentEmbeddedBlob(), StickyEmbeddedBlob());

  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
  current_embedded_blob_refs_--;
  if (current_embedded_blob_refs_ == 0 && enable_embedded_blob_refcounting_) {
    // We own the embedded blob and are the last holder. Free it.
    InstructionStream::FreeOffHeapInstructionStream(
        const_cast<uint8_t*>(embedded_blob()), embedded_blob_size());
    ClearEmbeddedBlob();
  }
}

3303 3304 3305 3306 3307 3308 3309 3310 3311
// Initializes the isolate from scratch, without deserializing any snapshot.
bool Isolate::InitWithoutSnapshot() { return Init(nullptr, nullptr); }

// Initializes the isolate from snapshot data; both the read-only and the
// startup deserializer must be provided.
bool Isolate::InitWithSnapshot(ReadOnlyDeserializer* read_only_deserializer,
                               StartupDeserializer* startup_deserializer) {
  DCHECK_NOT_NULL(read_only_deserializer);
  DCHECK_NOT_NULL(startup_deserializer);
  return Init(read_only_deserializer, startup_deserializer);
}

3312
// Registers crash keys recording the addresses of the isolate and of the
// first pages of its read-only, map, and code spaces, to aid crash triage.
static void AddCrashKeysForIsolateAndHeapPointers(Isolate* isolate) {
  v8::Platform* platform = V8::GetCurrentPlatform();

  const int id = isolate->id();
  platform->AddCrashKey(id, "isolate", reinterpret_cast<uintptr_t>(isolate));

  auto heap = isolate->heap();
  platform->AddCrashKey(id, "ro_space",
    reinterpret_cast<uintptr_t>(heap->read_only_space()->first_page()));
  platform->AddCrashKey(id, "map_space",
    reinterpret_cast<uintptr_t>(heap->map_space()->first_page()));
  platform->AddCrashKey(id, "code_space",
    reinterpret_cast<uintptr_t>(heap->code_space()->first_page()));
}

3327 3328
// Performs full isolate initialization, either from scratch (both
// deserializers null) or from snapshot data. Returns false only on heap
// object creation failure; the step order below is load-bearing throughout.
bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
                   StartupDeserializer* startup_deserializer) {
  TRACE_ISOLATE(init);
  const bool create_heap_objects = (read_only_deserializer == nullptr);
  // We either have both or neither.
  DCHECK_EQ(create_heap_objects, startup_deserializer == nullptr);

  base::ElapsedTimer timer;
  if (create_heap_objects && FLAG_profile_deserialization) timer.Start();

  time_millis_at_init_ = heap_.MonotonicallyIncreasingTimeInMs();

  stress_deopt_count_ = FLAG_deopt_every_n_times;
  force_slow_path_ = FLAG_force_slow_path;

  has_fatal_error_ = false;

  // The initialization process does not handle memory exhaustion.
  AlwaysAllocateScope always_allocate(this);

  // Safe after setting Heap::isolate_, and initializing StackGuard.
  heap_.SetStackLimits();

#define ASSIGN_ELEMENT(CamelName, hacker_name)                  \
  isolate_addresses_[IsolateAddressId::k##CamelName##Address] = \
      reinterpret_cast<Address>(hacker_name##_address());
  FOR_EACH_ISOLATE_ADDRESS_NAME(ASSIGN_ELEMENT)
#undef ASSIGN_ELEMENT

  // Construct the heap-independent owned subsystems.
  compilation_cache_ = new CompilationCache(this);
  descriptor_lookup_cache_ = new DescriptorLookupCache();
  inner_pointer_to_code_cache_ = new InnerPointerToCodeCache(this);
  global_handles_ = new GlobalHandles(this);
  eternal_handles_ = new EternalHandles();
  bootstrapper_ = new Bootstrapper(this);
  handle_scope_implementer_ = new HandleScopeImplementer(this);
  load_stub_cache_ = new StubCache(this);
  store_stub_cache_ = new StubCache(this);
  materialized_object_store_ = new MaterializedObjectStore(this);
  regexp_stack_ = new RegExpStack();
  regexp_stack_->isolate_ = this;
  date_cache_ = new DateCache();
  heap_profiler_ = new HeapProfiler(heap());
  interpreter_ = new interpreter::Interpreter(this);

  compiler_dispatcher_ =
      new CompilerDispatcher(this, V8::GetCurrentPlatform(), FLAG_stack_size);

  // Enable logging before setting up the heap.
  logger_->SetUp(this);

  {  // NOLINT
    // Ensure that the thread has a valid stack guard.  The v8::Locker object
    // will ensure this too, but we don't have to use lockers if we are only
    // using one thread.
    ExecutionAccess lock(this);
    stack_guard()->InitThread(lock);
  }

  // SetUp the object heap.
  DCHECK(!heap_.HasBeenSetUp());
  heap_.SetUp();
  ReadOnlyHeap::SetUp(this, read_only_deserializer);
  heap_.SetUpSpaces();

  isolate_data_.external_reference_table()->Init(this);

  // Setup the wasm engine.
  if (wasm_engine_ == nullptr) {
    SetWasmEngine(wasm::WasmEngine::GetWasmEngine());
  }
  DCHECK_NOT_NULL(wasm_engine_);

  deoptimizer_data_ = new DeoptimizerData(heap());

  if (setup_delegate_ == nullptr) {
    setup_delegate_ = new SetupIsolateDelegate(create_heap_objects);
  }

  if (!FLAG_inline_new) heap_.DisableInlineAllocation();

  if (!setup_delegate_->SetupHeap(&heap_)) {
    V8::FatalProcessOutOfMemory(this, "heap object creation");
    return false;
  }

  if (create_heap_objects) {
    // Terminate the partial snapshot cache so we can iterate.
    partial_snapshot_cache_.push_back(ReadOnlyRoots(this).undefined_value());
  }

  InitializeThreadLocal();

  // Profiler has to be created after ThreadLocal is initialized
  // because it makes use of interrupts.
  tracing_cpu_profiler_.reset(new TracingCpuProfilerImpl(this));

  bootstrapper_->Initialize(create_heap_objects);

  if (FLAG_embedded_builtins && create_heap_objects) {
    builtins_constants_table_builder_ = new BuiltinsConstantsTableBuilder(this);
  }
  setup_delegate_->SetupBuiltins(this);
#ifndef V8_TARGET_ARCH_ARM
  if (create_heap_objects) {
    // Store the interpreter entry trampoline on the root list. It is used as a
    // template for further copies that may later be created to help profile
    // interpreted code.
    // We currently cannot do this on arm due to RELATIVE_CODE_TARGETs
    // assuming that all possible Code targets may be addressed with an int24
    // offset, effectively limiting code space size to 32MB. We can guarantee
    // this at mksnapshot-time, but not at runtime.
    // See also: https://crbug.com/v8/8713.
    heap_.SetInterpreterEntryTrampolineForProfiling(
        heap_.builtin(Builtins::kInterpreterEntryTrampoline));
  }
#endif
  if (FLAG_embedded_builtins && create_heap_objects) {
    builtins_constants_table_builder_->Finalize();
    delete builtins_constants_table_builder_;
    builtins_constants_table_builder_ = nullptr;

    CreateAndSetEmbeddedBlob();
  }

  // Initialize custom memcopy and memmove functions (must happen after
  // embedded blob setup).
  init_memcopy_functions();

  if (FLAG_log_internal_timer_events) {
    set_event_logger(Logger::DefaultEventLoggerSentinel);
  }

  if (FLAG_trace_turbo || FLAG_trace_turbo_graph || FLAG_turbo_profiling) {
    PrintF("Concurrent recompilation has been disabled for tracing.\n");
  } else if (OptimizingCompileDispatcher::Enabled()) {
    optimizing_compile_dispatcher_ = new OptimizingCompileDispatcher(this);
  }

  // Initialize runtime profiler before deserialization, because collections
  // may occur, clearing/updating ICs.
  runtime_profiler_ = new RuntimeProfiler(this);

  // If we are deserializing, read the state into the now-empty heap.
  {
    AlwaysAllocateScope always_allocate(this);
    CodeSpaceMemoryModificationScope modification_scope(&heap_);

    if (create_heap_objects) {
      heap_.read_only_space()->ClearStringPaddingIfNeeded();
      read_only_heap_->OnCreateHeapObjectsComplete(this);
    } else {
      startup_deserializer->DeserializeInto(this);
    }
    load_stub_cache_->Initialize();
    store_stub_cache_->Initialize();
    interpreter_->Initialize();
    heap_.NotifyDeserializationComplete();
  }

#ifdef VERIFY_HEAP
  if (FLAG_verify_heap) {
    heap_.VerifyReadOnlyHeap();
  }
#endif

  delete setup_delegate_;
  setup_delegate_ = nullptr;

  Builtins::UpdateBuiltinEntryTable(this);
  Builtins::EmitCodeCreateEvents(this);

#ifdef DEBUG
  // Verify that the current heap state (usually deserialized from the
  // snapshot) is compatible with the embedded blob. If this DCHECK fails,
  // we've likely loaded a snapshot generated by a different V8 version or
  // build-time configuration.
  if (!IsolateIsCompatibleWithEmbeddedBlob(this)) {
    FATAL(
        "The Isolate is incompatible with the embedded blob. This is usually "
        "caused by incorrect usage of mksnapshot. When generating custom "
        "snapshots, embedders must ensure they pass the same flags as during "
        "the V8 build process (e.g.: --turbo-instruction-scheduling).");
  }
  DCHECK_IMPLIES(FLAG_jitless, FLAG_embedded_builtins);
#endif  // DEBUG

#ifndef V8_TARGET_ARCH_ARM
  // The IET for profiling should always be a full on-heap Code object.
  DCHECK(!Code::cast(heap_.interpreter_entry_trampoline_for_profiling())
              .is_off_heap_trampoline());
#endif  // V8_TARGET_ARCH_ARM

  if (FLAG_print_builtin_code) builtins()->PrintBuiltinCode();
  if (FLAG_print_builtin_size) builtins()->PrintBuiltinSize();

  // Finish initialization of ThreadLocal after deserialization is done.
  clear_pending_exception();
  clear_pending_message();
  clear_scheduled_exception();

  // Deserializing may put strange things in the root array's copy of the
  // stack guard.
  heap_.SetStackLimits();

  // Quiet the heap NaN if needed on target platform.
  if (!create_heap_objects)
    Assembler::QuietNaN(ReadOnlyRoots(this).nan_value());

  if (FLAG_trace_turbo) {
    // Create an empty file.
    std::ofstream(GetTurboCfgFileName(this).c_str(), std::ios_base::trunc);
  }

  {
    HandleScope scope(this);
    ast_string_constants_ = new AstStringConstants(this, HashSeed(this));
  }

  initialized_from_snapshot_ = !create_heap_objects;

  if (FLAG_stress_sampling_allocation_profiler > 0) {
    uint64_t sample_interval = FLAG_stress_sampling_allocation_profiler;
    int stack_depth = 128;
    v8::HeapProfiler::SamplingFlags sampling_flags =
        v8::HeapProfiler::SamplingFlags::kSamplingForceGC;
    heap_profiler()->StartSamplingHeapProfiler(sample_interval, stack_depth,
                                               sampling_flags);
  }

#if defined(V8_OS_WIN64)
  if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange()) {
    const base::AddressRegion& code_range =
        heap()->memory_allocator()->code_range();
    void* start = reinterpret_cast<void*>(code_range.begin());
    size_t size_in_bytes = code_range.size();
    win64_unwindinfo::RegisterNonABICompliantCodeRange(start, size_in_bytes);
  }
#endif  // V8_OS_WIN64

  if (create_heap_objects && FLAG_profile_deserialization) {
    double ms = timer.Elapsed().InMillisecondsF();
    PrintF("[Initializing isolate from scratch took %0.3f ms]\n", ms);
  }

  AddCrashKeysForIsolateAndHeapPointers(this);
  return true;
}

void Isolate::Enter() {
3577
  Isolate* current_isolate = nullptr;
3578
  PerIsolateThreadData* current_data = CurrentPerIsolateThreadData();
3579
  if (current_data != nullptr) {
3580
    current_isolate = current_data->isolate_;
3581
    DCHECK_NOT_NULL(current_isolate);
3582
    if (current_isolate == this) {
3583
      DCHECK(Current() == this);
3584
      DCHECK_NOT_NULL(entry_stack_);
3585
      DCHECK(entry_stack_->previous_thread_data == nullptr ||
Clemens Hammacher's avatar
Clemens Hammacher committed
3586 3587
             entry_stack_->previous_thread_data->thread_id() ==
                 ThreadId::Current());
3588 3589 3590 3591 3592 3593 3594
      // Same thread re-enters the isolate, no need to re-init anything.
      entry_stack_->entry_count++;
      return;
    }
  }

  PerIsolateThreadData* data = FindOrAllocatePerThreadDataForThisThread();
3595
  DCHECK_NOT_NULL(data);
3596
  DCHECK(data->isolate_ == this);
3597

3598 3599
  EntryStackItem* item =
      new EntryStackItem(current_data, current_isolate, entry_stack_);
3600 3601 3602 3603 3604 3605 3606 3607 3608
  entry_stack_ = item;

  SetIsolateThreadLocals(this, data);

  // In case it's the first time some thread enters the isolate.
  set_thread_id(data->thread_id());
}

void Isolate::Exit() {
3609
  DCHECK_NOT_NULL(entry_stack_);
3610
  DCHECK(entry_stack_->previous_thread_data == nullptr ||
Clemens Hammacher's avatar
Clemens Hammacher committed
3611 3612
         entry_stack_->previous_thread_data->thread_id() ==
             ThreadId::Current());
3613 3614 3615

  if (--entry_stack_->entry_count > 0) return;

3616
  DCHECK_NOT_NULL(CurrentPerIsolateThreadData());
3617
  DCHECK(CurrentPerIsolateThreadData()->isolate_ == this);
3618 3619 3620 3621 3622 3623 3624 3625 3626 3627 3628 3629 3630 3631

  // Pop the stack.
  EntryStackItem* item = entry_stack_;
  entry_stack_ = item->previous_item;

  PerIsolateThreadData* previous_thread_data = item->previous_thread_data;
  Isolate* previous_isolate = item->previous_isolate;

  delete item;

  // Reinit the current thread for the isolate it was running before this one.
  SetIsolateThreadLocals(previous_isolate, previous_thread_data);
}

3632 3633
// Prepends |deferred| to the isolate's doubly-linked list of DeferredHandles.
void Isolate::LinkDeferredHandles(DeferredHandles* deferred) {
  deferred->next_ = deferred_handles_head_;
  if (deferred_handles_head_ != nullptr) {
    deferred_handles_head_->previous_ = deferred;
  }
  deferred_handles_head_ = deferred;
}

// Removes |deferred| from the isolate's doubly-linked DeferredHandles list,
// fixing up the head pointer and the neighbors' links.
void Isolate::UnlinkDeferredHandles(DeferredHandles* deferred) {
#ifdef DEBUG
  // In debug mode assert that the linked list is well-formed: walking back
  // from |deferred| must reach the list head.
  DeferredHandles* deferred_iterator = deferred;
  while (deferred_iterator->previous_ != nullptr) {
    deferred_iterator = deferred_iterator->previous_;
  }
  DCHECK(deferred_handles_head_ == deferred_iterator);
#endif
  if (deferred_handles_head_ == deferred) {
    deferred_handles_head_ = deferred_handles_head_->next_;
  }
  if (deferred->next_ != nullptr) {
    deferred->next_->previous_ = deferred->previous_;
  }
  if (deferred->previous_ != nullptr) {
    deferred->previous_->next_ = deferred->next_;
  }
}

3660
void Isolate::DumpAndResetStats() {
3661
  if (turbo_statistics() != nullptr) {
3662
    DCHECK(FLAG_turbo_stats || FLAG_turbo_stats_nvp);
3663
    StdoutStream os;
3664 3665 3666 3667 3668 3669 3670 3671
    if (FLAG_turbo_stats) {
      AsPrintableStatistics ps = {*turbo_statistics(), false};
      os << ps << std::endl;
    }
    if (FLAG_turbo_stats_nvp) {
      AsPrintableStatistics ps = {*turbo_statistics(), true};
      os << ps << std::endl;
    }
3672 3673 3674 3675 3676 3677 3678
    delete turbo_statistics_;
    turbo_statistics_ = nullptr;
  }
  // TODO(7424): There is no public API for the {WasmEngine} yet. So for now we
  // just dump and reset the engines statistics together with the Isolate.
  if (FLAG_turbo_stats_wasm) {
    wasm_engine()->DumpAndResetTurboStatistics();
3679
  }
3680
  if (V8_UNLIKELY(TracingFlags::runtime_stats.load(std::memory_order_relaxed) ==
3681
                  v8::tracing::TracingCategoryObserver::ENABLED_BY_NATIVE)) {
3682 3683
    counters()->worker_thread_runtime_call_stats()->AddToMainTable(
        counters()->runtime_call_stats());
3684
    counters()->runtime_call_stats()->Print();
3685
    counters()->runtime_call_stats()->Reset();
3686
  }
3687 3688
}

3689 3690 3691 3692 3693 3694
// Flushes the concurrent optimizing compile dispatcher's queued jobs;
// |behavior| selects whether the flush blocks. No-op when concurrent
// recompilation is disabled.
void Isolate::AbortConcurrentOptimization(BlockingBehavior behavior) {
  if (concurrent_recompilation_enabled()) {
    DisallowHeapAllocation no_recursive_gc;
    optimizing_compile_dispatcher()->Flush(behavior);
  }
}
3695

3696
// Returns the TurboFan compilation statistics, creating them lazily.
CompilationStatistics* Isolate::GetTurboStatistics() {
  if (turbo_statistics() == nullptr)
    set_turbo_statistics(new CompilationStatistics());
  return turbo_statistics();
}

3702
// Returns the isolate's CodeTracer, creating it lazily.
CodeTracer* Isolate::GetCodeTracer() {
  if (code_tracer() == nullptr) set_code_tracer(new CodeTracer(id()));
  return code_tracer();
}

Mythri's avatar
Mythri committed
3707
bool Isolate::use_optimizer() {
3708
  return FLAG_opt && !serializer_enabled_ && CpuFeatures::SupportsOptimizer() &&
3709
         !is_precise_count_code_coverage();
3710 3711
}

3712
bool Isolate::NeedsDetailedOptimizedCodeLineInfo() const {
3713 3714
  return NeedsSourcePositionsForProfiling() ||
         detailed_source_positions_for_profiling();
3715 3716
}

3717 3718
bool Isolate::NeedsSourcePositionsForProfiling() const {
  return FLAG_trace_deopt || FLAG_trace_turbo || FLAG_trace_turbo_graph ||
3719
         FLAG_turbo_profiling || FLAG_perf_prof || is_profiling() ||
3720
         debug_->is_active() || logger_->is_logging() || FLAG_trace_maps;
3721 3722
}

3723
// Stores the feedback-vector list for profiling tools on the heap; the only
// valid values are undefined (disabled) or an ArrayList of vectors.
void Isolate::SetFeedbackVectorsForProfilingTools(Object value) {
  DCHECK(value.IsUndefined(this) || value.IsArrayList());
  heap()->set_feedback_vectors_for_profiling_tools(value);
}

3728
void Isolate::MaybeInitializeVectorListFromHeap() {
3729
  if (!heap()->feedback_vectors_for_profiling_tools().IsUndefined(this)) {
3730
    // Already initialized, return early.
3731
    DCHECK(heap()->feedback_vectors_for_profiling_tools().IsArrayList());
3732 3733 3734
    return;
  }

3735 3736
  // Collect existing feedback vectors.
  std::vector<Handle<FeedbackVector>> vectors;
3737

3738
  {
3739
    HeapObjectIterator heap_iterator(heap());
3740 3741
    for (HeapObject current_obj = heap_iterator.Next(); !current_obj.is_null();
         current_obj = heap_iterator.Next()) {
3742
      if (!current_obj.IsFeedbackVector()) continue;
3743

3744
      FeedbackVector vector = FeedbackVector::cast(current_obj);
3745
      SharedFunctionInfo shared = vector.shared_function_info();
3746 3747

      // No need to preserve the feedback vector for non-user-visible functions.
3748
      if (!shared.IsSubjectToDebugging()) continue;
3749 3750

      vectors.emplace_back(vector, this);
3751 3752 3753
    }
  }

3754
  // Add collected feedback vectors to the root list lest we lose them to GC.
3755 3756
  Handle<ArrayList> list =
      ArrayList::New(this, static_cast<int>(vectors.size()));
3757
  for (const auto& vector : vectors) list = ArrayList::Add(this, list, vector);
3758
  SetFeedbackVectorsForProfilingTools(*list);
3759 3760
}

3761 3762 3763 3764 3765 3766 3767
// Replaces the isolate's DateCache. The old cache is deleted unless the same
// object is passed back in (self-assignment must not free it).
void Isolate::set_date_cache(DateCache* date_cache) {
  if (date_cache != date_cache_) {
    delete date_cache_;
  }
  date_cache_ = date_cache;
}

3768 3769
// Returns true if |object| is the initial Array, Object, or String prototype
// of any native context currently on the native-contexts list.
bool Isolate::IsArrayOrObjectOrStringPrototype(Object object) {
  Object context = heap()->native_contexts_list();
  while (!context.IsUndefined(this)) {
    Context current_context = Context::cast(context);
    if (current_context.initial_object_prototype() == object ||
        current_context.initial_array_prototype() == object ||
        current_context.initial_string_prototype() == object) {
      return true;
    }
    context = current_context.next_context_link();
  }
  return false;
}

3782
// Returns true if any native context stores |object| at context slot |index|.
bool Isolate::IsInAnyContext(Object object, uint32_t index) {
  DisallowHeapAllocation no_gc;
  Object context = heap()->native_contexts_list();
  while (!context.IsUndefined(this)) {
    Context current_context = Context::cast(context);
    if (current_context.get(index) == object) {
      return true;
    }
    context = current_context.next_context_link();
  }
  return false;
}
3794

3795
bool Isolate::IsNoElementsProtectorIntact(Context context) {
3796
  PropertyCell no_elements_cell = heap()->no_elements_protector();
3797
  bool cell_reports_intact =
3798 3799
      no_elements_cell.value().IsSmi() &&
      Smi::ToInt(no_elements_cell.value()) == kProtectorValid;
3800 3801

#ifdef DEBUG
3802
  Context native_context = context.native_context();
3803

3804
  Map root_array_map =
3805
      native_context.GetInitialJSArrayMap(GetInitialFastElementsKind());
3806
  JSObject initial_array_proto = JSObject::cast(
3807
      native_context.get(Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
3808
  JSObject initial_object_proto = JSObject::cast(
3809
      native_context.get(Context::INITIAL_OBJECT_PROTOTYPE_INDEX));
3810
  JSObject initial_string_proto = JSObject::cast(
3811
      native_context.get(Context::INITIAL_STRING_PROTOTYPE_INDEX));
3812

3813
  if (root_array_map.is_null() || initial_array_proto == initial_object_proto) {
3814 3815 3816 3817
    // We are in the bootstrapping process, and the entire check sequence
    // shouldn't be performed.
    return cell_reports_intact;
  }
3818 3819

  // Check that the array prototype hasn't been altered WRT empty elements.
3820
  if (root_array_map.prototype() != initial_array_proto) {
3821 3822 3823 3824
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

3825
  FixedArrayBase elements = initial_array_proto.elements();
3826 3827 3828
  ReadOnlyRoots roots(heap());
  if (elements != roots.empty_fixed_array() &&
      elements != roots.empty_slow_element_dictionary()) {
3829 3830
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
3831 3832
  }

3833
  // Check that the Object.prototype hasn't been altered WRT empty elements.
3834
  elements = initial_object_proto.elements();
3835 3836
  if (elements != roots.empty_fixed_array() &&
      elements != roots.empty_slow_element_dictionary()) {
3837 3838 3839 3840 3841 3842
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }

  // Check that the Array.prototype has the Object.prototype as its
  // [[Prototype]] and that the Object.prototype has a null [[Prototype]].
3843 3844
  PrototypeIterator iter(this, initial_array_proto);
  if (iter.IsAtEnd() || iter.GetCurrent() != initial_object_proto) {
3845
    DCHECK_EQ(false, cell_reports_intact);
3846
    DCHECK(!has_pending_exception());
3847
    return cell_reports_intact;
3848
  }
3849 3850 3851
  iter.Advance();
  if (!iter.IsAtEnd()) {
    DCHECK_EQ(false, cell_reports_intact);
3852
    DCHECK(!has_pending_exception());
3853 3854
    return cell_reports_intact;
  }
3855
  DCHECK(!has_pending_exception());
3856

3857
  // Check that the String.prototype hasn't been altered WRT empty elements.
3858
  elements = initial_string_proto.elements();
3859 3860
  if (elements != roots.empty_fixed_array() &&
      elements != roots.empty_slow_element_dictionary()) {
3861 3862
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
3863 3864
  }

3865 3866
  // Check that the String.prototype has the Object.prototype
  // as its [[Prototype]] still.
3867
  if (initial_string_proto.map().prototype() != initial_object_proto) {
3868 3869 3870 3871 3872 3873 3874 3875
    DCHECK_EQ(false, cell_reports_intact);
    return cell_reports_intact;
  }
#endif

  return cell_reports_intact;
}

3876 3877 3878 3879
bool Isolate::IsNoElementsProtectorIntact() {
  return Isolate::IsNoElementsProtectorIntact(context());
}

3880
bool Isolate::IsIsConcatSpreadableLookupChainIntact() {
3881
  Cell is_concat_spreadable_cell = heap()->is_concat_spreadable_protector();
3882
  bool is_is_concat_spreadable_set =
3883
      Smi::ToInt(is_concat_spreadable_cell.value()) == kProtectorInvalid;
3884
#ifdef DEBUG
3885
  Map root_array_map =
3886
      raw_native_context().GetInitialJSArrayMap(GetInitialFastElementsKind());
3887
  if (root_array_map.is_null()) {
3888 3889 3890 3891 3892 3893
    // Ignore the value of is_concat_spreadable during bootstrap.
    return !is_is_concat_spreadable_set;
  }
  Handle<Object> array_prototype(array_function()->prototype(), this);
  Handle<Symbol> key = factory()->is_concat_spreadable_symbol();
  Handle<Object> value;
3894
  LookupIterator it(this, array_prototype, key);
3895
  if (it.IsFound() && !JSReceiver::GetDataProperty(&it)->IsUndefined(this)) {
3896 3897 3898 3899 3900 3901 3902 3903 3904
    // TODO(cbruni): Currently we do not revert if we unset the
    // @@isConcatSpreadable property on Array.prototype or Object.prototype
    // hence the reverse implication doesn't hold.
    DCHECK(is_is_concat_spreadable_set);
    return false;
  }
#endif  // DEBUG

  return !is_is_concat_spreadable_set;
3905
}
3906

3907
bool Isolate::IsIsConcatSpreadableLookupChainIntact(JSReceiver receiver) {
3908
  if (!IsIsConcatSpreadableLookupChainIntact()) return false;
3909
  return !receiver.HasProxyInPrototype(this);
3910 3911
}

3912
bool Isolate::IsPromiseHookProtectorIntact() {
3913
  PropertyCell promise_hook_cell = heap()->promise_hook_protector();
3914
  bool is_promise_hook_protector_intact =
3915
      Smi::ToInt(promise_hook_cell.value()) == kProtectorValid;
3916
  DCHECK_IMPLIES(is_promise_hook_protector_intact,
3917
                 !promise_hook_or_async_event_delegate_);
3918 3919
  DCHECK_IMPLIES(is_promise_hook_protector_intact,
                 !promise_hook_or_debug_is_active_or_async_event_delegate_);
3920 3921 3922
  return is_promise_hook_protector_intact;
}

3923
bool Isolate::IsPromiseResolveLookupChainIntact() {
3924
  Cell promise_resolve_cell = heap()->promise_resolve_protector();
3925
  bool is_promise_resolve_protector_intact =
3926
      Smi::ToInt(promise_resolve_cell.value()) == kProtectorValid;
3927 3928 3929
  return is_promise_resolve_protector_intact;
}

3930
bool Isolate::IsPromiseThenLookupChainIntact() {
3931
  PropertyCell promise_then_cell = heap()->promise_then_protector();
3932
  bool is_promise_then_protector_intact =
3933
      Smi::ToInt(promise_then_cell.value()) == kProtectorValid;
3934 3935 3936
  return is_promise_then_protector_intact;
}

3937 3938 3939
bool Isolate::IsPromiseThenLookupChainIntact(Handle<JSReceiver> receiver) {
  DisallowHeapAllocation no_gc;
  if (!receiver->IsJSPromise()) return false;
3940
  if (!IsInAnyContext(receiver->map().prototype(),
3941 3942 3943 3944 3945 3946
                      Context::PROMISE_PROTOTYPE_INDEX)) {
    return false;
  }
  return IsPromiseThenLookupChainIntact();
}

3947
void Isolate::UpdateNoElementsProtectorOnSetElement(Handle<JSObject> object) {
3948
  DisallowHeapAllocation no_gc;
3949
  if (!object->map().is_prototype_map()) return;
3950
  if (!IsNoElementsProtectorIntact()) return;
3951
  if (!IsArrayOrObjectOrStringPrototype(*object)) return;
3952
  PropertyCell::SetValueWithInvalidation(
3953
      this, "no_elements_protector", factory()->no_elements_protector(),
3954
      handle(Smi::FromInt(kProtectorInvalid), this));
3955 3956
}

3957 3958 3959 3960 3961 3962 3963 3964 3965 3966 3967 3968 3969 3970 3971
void Isolate::TraceProtectorInvalidation(const char* protector_name) {
  static constexpr char kInvalidateProtectorTracingCategory[] =
      "V8.InvalidateProtector";
  static constexpr char kInvalidateProtectorTracingArg[] = "protector-name";

  DCHECK(FLAG_trace_protector_invalidation);

  // TODO(jgruber): Remove the PrintF once tracing can output to stdout.
  i::PrintF("Invalidating protector cell %s in isolate %p\n", protector_name,
            this);
  TRACE_EVENT_INSTANT1("v8", kInvalidateProtectorTracingCategory,
                       TRACE_EVENT_SCOPE_THREAD, kInvalidateProtectorTracingArg,
                       protector_name);
}

3972
void Isolate::InvalidateIsConcatSpreadableProtector() {
3973
  DCHECK(factory()->is_concat_spreadable_protector()->value().IsSmi());
3974
  DCHECK(IsIsConcatSpreadableLookupChainIntact());
3975 3976 3977
  if (FLAG_trace_protector_invalidation) {
    TraceProtectorInvalidation("is_concat_spreadable_protector");
  }
3978
  factory()->is_concat_spreadable_protector()->set_value(
3979
      Smi::FromInt(kProtectorInvalid));
3980 3981 3982
  DCHECK(!IsIsConcatSpreadableLookupChainIntact());
}

3983
void Isolate::InvalidateArrayConstructorProtector() {
3984
  DCHECK(factory()->array_constructor_protector()->value().IsSmi());
3985
  DCHECK(IsArrayConstructorIntact());
3986 3987 3988
  if (FLAG_trace_protector_invalidation) {
    TraceProtectorInvalidation("array_constructor_protector");
  }
3989 3990 3991 3992 3993
  factory()->array_constructor_protector()->set_value(
      Smi::FromInt(kProtectorInvalid));
  DCHECK(!IsArrayConstructorIntact());
}

3994
void Isolate::InvalidateArraySpeciesProtector() {
3995
  DCHECK(factory()->array_species_protector()->value().IsSmi());
3996
  DCHECK(IsArraySpeciesLookupChainIntact());
3997
  PropertyCell::SetValueWithInvalidation(
3998
      this, "array_species_protector", factory()->array_species_protector(),
3999
      handle(Smi::FromInt(kProtectorInvalid), this));
4000 4001 4002 4003
  DCHECK(!IsArraySpeciesLookupChainIntact());
}

void Isolate::InvalidateTypedArraySpeciesProtector() {
4004
  DCHECK(factory()->typed_array_species_protector()->value().IsSmi());
4005
  DCHECK(IsTypedArraySpeciesLookupChainIntact());
4006
  PropertyCell::SetValueWithInvalidation(
4007 4008
      this, "typed_array_species_protector",
      factory()->typed_array_species_protector(),
4009
      handle(Smi::FromInt(kProtectorInvalid), this));
4010 4011 4012
  DCHECK(!IsTypedArraySpeciesLookupChainIntact());
}

4013 4014 4015 4016 4017 4018 4019
void Isolate::InvalidateRegExpSpeciesProtector(
    Handle<NativeContext> native_context) {
  DCHECK_EQ(*native_context, this->raw_native_context());
  DCHECK(native_context->regexp_species_protector().value().IsSmi());
  DCHECK(IsRegExpSpeciesLookupChainIntact(native_context));
  Handle<PropertyCell> species_cell(native_context->regexp_species_protector(),
                                    this);
4020
  PropertyCell::SetValueWithInvalidation(
4021 4022
      this, "regexp_species_protector", species_cell,
      handle(Smi::FromInt(kProtectorInvalid), this));
4023
  DCHECK(!IsRegExpSpeciesLookupChainIntact(native_context));
4024 4025
}

4026
void Isolate::InvalidatePromiseSpeciesProtector() {
4027
  DCHECK(factory()->promise_species_protector()->value().IsSmi());
4028
  DCHECK(IsPromiseSpeciesLookupChainIntact());
4029
  PropertyCell::SetValueWithInvalidation(
4030
      this, "promise_species_protector", factory()->promise_species_protector(),
4031
      handle(Smi::FromInt(kProtectorInvalid), this));
4032
  DCHECK(!IsPromiseSpeciesLookupChainIntact());
4033
}
4034

4035
void Isolate::InvalidateStringLengthOverflowProtector() {
4036
  DCHECK(factory()->string_length_protector()->value().IsSmi());
4037
  DCHECK(IsStringLengthOverflowIntact());
4038 4039 4040
  if (FLAG_trace_protector_invalidation) {
    TraceProtectorInvalidation("string_length_protector");
  }
4041 4042
  factory()->string_length_protector()->set_value(
      Smi::FromInt(kProtectorInvalid));
4043 4044 4045
  DCHECK(!IsStringLengthOverflowIntact());
}

4046
void Isolate::InvalidateArrayIteratorProtector() {
4047
  DCHECK(factory()->array_iterator_protector()->value().IsSmi());
4048
  DCHECK(IsArrayIteratorLookupChainIntact());
4049
  PropertyCell::SetValueWithInvalidation(
4050
      this, "array_iterator_protector", factory()->array_iterator_protector(),
4051
      handle(Smi::FromInt(kProtectorInvalid), this));
4052 4053 4054
  DCHECK(!IsArrayIteratorLookupChainIntact());
}

4055
void Isolate::InvalidateMapIteratorProtector() {
4056
  DCHECK(factory()->map_iterator_protector()->value().IsSmi());
4057 4058
  DCHECK(IsMapIteratorLookupChainIntact());
  PropertyCell::SetValueWithInvalidation(
4059
      this, "map_iterator_protector", factory()->map_iterator_protector(),
4060 4061 4062 4063 4064
      handle(Smi::FromInt(kProtectorInvalid), this));
  DCHECK(!IsMapIteratorLookupChainIntact());
}

void Isolate::InvalidateSetIteratorProtector() {
4065
  DCHECK(factory()->set_iterator_protector()->value().IsSmi());
4066 4067
  DCHECK(IsSetIteratorLookupChainIntact());
  PropertyCell::SetValueWithInvalidation(
4068
      this, "set_iterator_protector", factory()->set_iterator_protector(),
4069 4070 4071 4072
      handle(Smi::FromInt(kProtectorInvalid), this));
  DCHECK(!IsSetIteratorLookupChainIntact());
}

4073
void Isolate::InvalidateStringIteratorProtector() {
4074
  DCHECK(factory()->string_iterator_protector()->value().IsSmi());
4075 4076
  DCHECK(IsStringIteratorLookupChainIntact());
  PropertyCell::SetValueWithInvalidation(
4077
      this, "string_iterator_protector", factory()->string_iterator_protector(),
4078 4079 4080 4081
      handle(Smi::FromInt(kProtectorInvalid), this));
  DCHECK(!IsStringIteratorLookupChainIntact());
}

4082
void Isolate::InvalidateArrayBufferDetachingProtector() {
4083
  DCHECK(factory()->array_buffer_detaching_protector()->value().IsSmi());
4084
  DCHECK(IsArrayBufferDetachingIntact());
4085
  PropertyCell::SetValueWithInvalidation(
4086 4087
      this, "array_buffer_detaching_protector",
      factory()->array_buffer_detaching_protector(),
4088
      handle(Smi::FromInt(kProtectorInvalid), this));
4089
  DCHECK(!IsArrayBufferDetachingIntact());
4090 4091
}

4092
void Isolate::InvalidatePromiseHookProtector() {
4093
  DCHECK(factory()->promise_hook_protector()->value().IsSmi());
4094 4095
  DCHECK(IsPromiseHookProtectorIntact());
  PropertyCell::SetValueWithInvalidation(
4096
      this, "promise_hook_protector", factory()->promise_hook_protector(),
4097 4098 4099 4100
      handle(Smi::FromInt(kProtectorInvalid), this));
  DCHECK(!IsPromiseHookProtectorIntact());
}

4101
void Isolate::InvalidatePromiseResolveProtector() {
4102
  DCHECK(factory()->promise_resolve_protector()->value().IsSmi());
4103
  DCHECK(IsPromiseResolveLookupChainIntact());
4104 4105 4106
  if (FLAG_trace_protector_invalidation) {
    TraceProtectorInvalidation("promise_resolve_protector");
  }
4107 4108 4109 4110 4111
  factory()->promise_resolve_protector()->set_value(
      Smi::FromInt(kProtectorInvalid));
  DCHECK(!IsPromiseResolveLookupChainIntact());
}

4112
void Isolate::InvalidatePromiseThenProtector() {
4113
  DCHECK(factory()->promise_then_protector()->value().IsSmi());
4114 4115
  DCHECK(IsPromiseThenLookupChainIntact());
  PropertyCell::SetValueWithInvalidation(
4116
      this, "promise_then_protector", factory()->promise_then_protector(),
4117 4118 4119 4120
      handle(Smi::FromInt(kProtectorInvalid), this));
  DCHECK(!IsPromiseThenLookupChainIntact());
}

4121
bool Isolate::IsAnyInitialArrayPrototype(Handle<JSArray> array) {
4122 4123
  DisallowHeapAllocation no_gc;
  return IsInAnyContext(*array, Context::INITIAL_ARRAY_PROTOTYPE_INDEX);
4124 4125
}

4126 4127 4128 4129 4130
static base::RandomNumberGenerator* ensure_rng_exists(
    base::RandomNumberGenerator** rng, int seed) {
  if (*rng == nullptr) {
    if (seed != 0) {
      *rng = new base::RandomNumberGenerator(seed);
4131
    } else {
4132
      *rng = new base::RandomNumberGenerator();
4133 4134
    }
  }
4135 4136 4137 4138
  return *rng;
}

base::RandomNumberGenerator* Isolate::random_number_generator() {
  // TODO(bmeurer) Initialized lazily because it depends on flags; can
  // be fixed once the default isolate cleanup is done.
  return ensure_rng_exists(&random_number_generator_, FLAG_random_seed);
}

// Lazily-created RNG for fuzzing; defaults its seed to the main RNG's seed
// when --fuzzer-random-seed is 0, so runs stay reproducible.
base::RandomNumberGenerator* Isolate::fuzzer_rng() {
  if (fuzzer_rng_ == nullptr) {
    int64_t seed = FLAG_fuzzer_random_seed;
    if (seed == 0) {
      seed = random_number_generator()->initial_seed();
    }

    fuzzer_rng_ = new base::RandomNumberGenerator(seed);
  }

  return fuzzer_rng_;
}

4157 4158 4159 4160 4161 4162 4163 4164
int Isolate::GenerateIdentityHash(uint32_t mask) {
  int hash;
  int attempts = 0;
  do {
    hash = random_number_generator()->NextInt() & mask;
  } while (hash == 0 && attempts++ < 30);
  return hash != 0 ? hash : 1;
}
4165

4166
Code Isolate::FindCodeObject(Address a) {
4167
  return heap()->GcSafeFindCodeForInnerPointer(a);
4168 4169
}

#ifdef DEBUG
// Defines the debug-only static field-offset constants declared in isolate.h
// for every entry of the isolate init lists.
#define ISOLATE_FIELD_OFFSET(type, name, ignored) \
  const intptr_t Isolate::name##_debug_offset_ = OFFSET_OF(Isolate, name##_);
ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

4178
Handle<Symbol> Isolate::SymbolFor(RootIndex dictionary_index,
4179 4180 4181
                                  Handle<String> name, bool private_symbol) {
  Handle<String> key = factory()->InternalizeString(name);
  Handle<NameDictionary> dictionary =
4182
      Handle<NameDictionary>::cast(root_handle(dictionary_index));
4183
  int entry = dictionary->FindEntry(this, key);
4184 4185
  Handle<Symbol> symbol;
  if (entry == NameDictionary::kNotFound) {
4186 4187 4188
    symbol =
        private_symbol ? factory()->NewPrivateSymbol() : factory()->NewSymbol();
    symbol->set_name(*key);
4189
    dictionary = NameDictionary::Add(this, dictionary, key, symbol,
4190 4191
                                     PropertyDetails::Empty(), &entry);
    switch (dictionary_index) {
4192
      case RootIndex::kPublicSymbolTable:
4193
        symbol->set_is_in_public_symbol_table(true);
4194 4195
        heap()->set_public_symbol_table(*dictionary);
        break;
4196
      case RootIndex::kApiSymbolTable:
4197 4198
        heap()->set_api_symbol_table(*dictionary);
        break;
4199
      case RootIndex::kApiPrivateSymbolTable:
4200 4201 4202 4203 4204 4205
        heap()->set_api_private_symbol_table(*dictionary);
        break;
      default:
        UNREACHABLE();
    }
  } else {
4206
    symbol = Handle<Symbol>(Symbol::cast(dictionary->ValueAt(entry)), this);
4207
  }
4208
  return symbol;
4209 4210
}

4211
void Isolate::AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback) {
4212 4213 4214 4215
  auto pos = std::find(before_call_entered_callbacks_.begin(),
                       before_call_entered_callbacks_.end(), callback);
  if (pos != before_call_entered_callbacks_.end()) return;
  before_call_entered_callbacks_.push_back(callback);
4216 4217 4218 4219
}

// Unregisters |callback|; a no-op when it was never registered.
void Isolate::RemoveBeforeCallEnteredCallback(
    BeforeCallEnteredCallback callback) {
  auto pos = std::find(before_call_entered_callbacks_.begin(),
                       before_call_entered_callbacks_.end(), callback);
  if (pos == before_call_entered_callbacks_.end()) return;
  before_call_entered_callbacks_.erase(pos);
}

4226
void Isolate::AddCallCompletedCallback(CallCompletedCallback callback) {
4227 4228 4229 4230
  auto pos = std::find(call_completed_callbacks_.begin(),
                       call_completed_callbacks_.end(), callback);
  if (pos != call_completed_callbacks_.end()) return;
  call_completed_callbacks_.push_back(callback);
4231 4232
}

4233
void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) {
4234 4235 4236 4237
  auto pos = std::find(call_completed_callbacks_.begin(),
                       call_completed_callbacks_.end(), callback);
  if (pos == call_completed_callbacks_.end()) return;
  call_completed_callbacks_.erase(pos);
4238 4239
}

4240
void Isolate::FireCallCompletedCallback(MicrotaskQueue* microtask_queue) {
4241 4242
  if (!handle_scope_implementer()->CallDepthIsZero()) return;

4243
  bool run_microtasks =
4244 4245 4246
      microtask_queue && microtask_queue->size() &&
      !microtask_queue->HasMicrotasksSuppressions() &&
      microtask_queue->microtasks_policy() == v8::MicrotasksPolicy::kAuto;
4247

4248
  if (run_microtasks) {
4249
    microtask_queue->RunMicrotasks(this);
4250
  }
4251

4252
  if (call_completed_callbacks_.empty()) return;
4253
  // Fire callbacks.  Increase call depth to prevent recursive callbacks.
4254 4255
  v8::Isolate* isolate = reinterpret_cast<v8::Isolate*>(this);
  v8::Isolate::SuppressMicrotaskExecutionScope suppress(isolate);
4256 4257
  std::vector<CallCompletedCallback> callbacks(call_completed_callbacks_);
  for (auto& callback : callbacks) {
4258
    callback(reinterpret_cast<v8::Isolate*>(this));
4259 4260 4261
  }
}

4262
void Isolate::PromiseHookStateUpdated() {
4263 4264 4265 4266 4267 4268
  bool promise_hook_or_async_event_delegate =
      promise_hook_ || async_event_delegate_;
  bool promise_hook_or_debug_is_active_or_async_event_delegate =
      promise_hook_or_async_event_delegate || debug()->is_active();
  if (promise_hook_or_debug_is_active_or_async_event_delegate &&
      IsPromiseHookProtectorIntact()) {
4269
    HandleScope scope(this);
4270 4271
    InvalidatePromiseHookProtector();
  }
4272 4273 4274
  promise_hook_or_async_event_delegate_ = promise_hook_or_async_event_delegate;
  promise_hook_or_debug_is_active_or_async_event_delegate_ =
      promise_hook_or_debug_is_active_or_async_event_delegate;
4275 4276
}

4277 4278 4279 4280 4281 4282
namespace {

MaybeHandle<JSPromise> NewRejectedPromise(Isolate* isolate,
                                          v8::Local<v8::Context> api_context,
                                          Handle<Object> exception) {
  v8::Local<v8::Promise::Resolver> resolver;
4283 4284 4285
  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
      isolate, resolver, v8::Promise::Resolver::New(api_context),
      MaybeHandle<JSPromise>());
4286

4287 4288 4289
  RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
      isolate, resolver->Reject(api_context, v8::Utils::ToLocal(exception)),
      MaybeHandle<JSPromise>());
4290 4291 4292 4293 4294 4295 4296 4297

  v8::Local<v8::Promise> promise = resolver->GetPromise();
  return v8::Utils::OpenHandle(*promise);
}

}  // namespace

// Invokes the embedder's dynamic-import callback for import(|specifier|) from
// |referrer|. Failures (no callback, non-stringifiable specifier) surface as
// a rejected promise rather than a thrown exception.
MaybeHandle<JSPromise> Isolate::RunHostImportModuleDynamicallyCallback(
    Handle<Script> referrer, Handle<Object> specifier) {
  v8::Local<v8::Context> api_context =
      v8::Utils::ToLocal(Handle<Context>(native_context()));

  if (host_import_module_dynamically_callback_ == nullptr) {
    Handle<Object> exception =
        factory()->NewError(error_function(), MessageTemplate::kUnsupported);
    return NewRejectedPromise(this, api_context, exception);
  }

  Handle<String> specifier_str;
  MaybeHandle<String> maybe_specifier = Object::ToString(this, specifier);
  if (!maybe_specifier.ToHandle(&specifier_str)) {
    // ToString threw; reject with that exception instead of rethrowing.
    Handle<Object> exception(pending_exception(), this);
    clear_pending_exception();

    return NewRejectedPromise(this, api_context, exception);
  }
  DCHECK(!has_pending_exception());

  v8::Local<v8::Promise> promise;
  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
      this, promise,
      host_import_module_dynamically_callback_(
          api_context, v8::Utils::ScriptOrModuleToLocal(referrer),
          v8::Utils::ToLocal(specifier_str)),
      MaybeHandle<JSPromise>());
  return v8::Utils::OpenHandle(*promise);
}

4328 4329 4330 4331 4332 4333 4334 4335 4336 4337 4338 4339 4340 4341 4342 4343 4344
void Isolate::ClearKeptObjects() { heap()->ClearKeptObjects(); }

// Registers the embedder callback used to schedule FinalizationGroup cleanup.
void Isolate::SetHostCleanupFinalizationGroupCallback(
    HostCleanupFinalizationGroupCallback callback) {
  host_cleanup_finalization_group_callback_ = callback;
}

// Notifies the embedder that |fg| has cleanup work pending. Does nothing when
// no cleanup callback has been registered.
void Isolate::RunHostCleanupFinalizationGroupCallback(
    Handle<JSFinalizationGroup> fg) {
  if (host_cleanup_finalization_group_callback_ == nullptr) return;
  v8::Local<v8::Context> api_context =
      v8::Utils::ToLocal(handle(Context::cast(fg->native_context()), this));
  host_cleanup_finalization_group_callback_(api_context,
                                            v8::Utils::ToLocal(fg));
}

4345 4346 4347 4348 4349
void Isolate::SetHostImportModuleDynamicallyCallback(
    HostImportModuleDynamicallyCallback callback) {
  host_import_module_dynamically_callback_ = callback;
}

4350
Handle<JSObject> Isolate::RunHostInitializeImportMetaObjectCallback(
4351
    Handle<SourceTextModule> module) {
4352 4353 4354 4355
  Handle<Object> host_meta(module->import_meta(), this);
  if (host_meta->IsTheHole(this)) {
    host_meta = factory()->NewJSObjectWithNullProto();
    if (host_initialize_import_meta_object_callback_ != nullptr) {
4356 4357
      v8::Local<v8::Context> api_context =
          v8::Utils::ToLocal(Handle<Context>(native_context()));
4358
      host_initialize_import_meta_object_callback_(
4359
          api_context, Utils::ToLocal(Handle<Module>::cast(module)),
4360 4361 4362 4363 4364 4365 4366 4367 4368 4369 4370 4371
          v8::Local<v8::Object>::Cast(v8::Utils::ToLocal(host_meta)));
    }
    module->set_import_meta(*host_meta);
  }
  return Handle<JSObject>::cast(host_meta);
}

// Registers the embedder callback that populates import.meta objects.
void Isolate::SetHostInitializeImportMetaObjectCallback(
    HostInitializeImportMetaObjectCallback callback) {
  host_initialize_import_meta_object_callback_ = callback;
}

4372
MaybeHandle<Object> Isolate::RunPrepareStackTraceCallback(
4373
    Handle<Context> context, Handle<JSObject> error, Handle<JSArray> sites) {
4374 4375 4376 4377 4378
  v8::Local<v8::Context> api_context = Utils::ToLocal(context);

  v8::Local<v8::Value> stack;
  ASSIGN_RETURN_ON_SCHEDULED_EXCEPTION_VALUE(
      this, stack,
4379 4380
      prepare_stack_trace_callback_(api_context, Utils::ToLocal(error),
                                    Utils::ToLocal(sites)),
4381 4382 4383 4384
      MaybeHandle<Object>());
  return Utils::OpenHandle(*stack);
}

4385 4386 4387 4388 4389 4390 4391 4392 4393 4394 4395 4396 4397 4398 4399 4400 4401 4402 4403 4404 4405 4406 4407 4408 4409 4410 4411 4412 4413
int Isolate::LookupOrAddExternallyCompiledFilename(const char* filename) {
  if (embedded_file_writer_ != nullptr) {
    return embedded_file_writer_->LookupOrAddExternallyCompiledFilename(
        filename);
  }
  return 0;
}

// Forwards to the embedded-file writer when present; "" otherwise.
const char* Isolate::GetExternallyCompiledFilename(int index) const {
  if (embedded_file_writer_ != nullptr) {
    return embedded_file_writer_->GetExternallyCompiledFilename(index);
  }
  return "";
}

// Forwards to the embedded-file writer when present; 0 otherwise.
int Isolate::GetExternallyCompiledFilenameCount() const {
  if (embedded_file_writer_ != nullptr) {
    return embedded_file_writer_->GetExternallyCompiledFilenameCount();
  }
  return 0;
}

// Lets the embedded-file writer (if any) record builtin source positions.
void Isolate::PrepareBuiltinSourcePositionMap() {
  if (embedded_file_writer_ != nullptr) {
    return embedded_file_writer_->PrepareBuiltinSourcePositionMap(
        this->builtins());
  }
}

#if defined(V8_OS_WIN64)
// Records Windows x64 unwind info for |builtin_index| with the embedded-file
// writer, when one is attached.
void Isolate::SetBuiltinUnwindData(
    int builtin_index,
    const win64_unwindinfo::BuiltinUnwindInfo& unwinding_info) {
  if (embedded_file_writer_ != nullptr) {
    embedded_file_writer_->SetBuiltinUnwindData(builtin_index, unwinding_info);
  }
}
#endif  // V8_OS_WIN64

4424 4425 4426 4427 4428 4429 4430 4431
void Isolate::SetPrepareStackTraceCallback(PrepareStackTraceCallback callback) {
  prepare_stack_trace_callback_ = callback;
}

// True when the embedder registered a prepare-stack-trace callback.
bool Isolate::HasPrepareStackTraceCallback() const {
  return prepare_stack_trace_callback_ != nullptr;
}

4432 4433 4434 4435 4436 4437 4438 4439
void Isolate::SetAtomicsWaitCallback(v8::Isolate::AtomicsWaitCallback callback,
                                     void* data) {
  atomics_wait_callback_ = callback;
  atomics_wait_callback_data_ = data;
}

void Isolate::RunAtomicsWaitCallback(v8::Isolate::AtomicsWaitEvent event,
                                     Handle<JSArrayBuffer> array_buffer,
4440
                                     size_t offset_in_bytes, int64_t value,
4441 4442 4443 4444 4445 4446 4447 4448 4449 4450 4451 4452
                                     double timeout_in_ms,
                                     AtomicsWaitWakeHandle* stop_handle) {
  DCHECK(array_buffer->is_shared());
  if (atomics_wait_callback_ == nullptr) return;
  HandleScope handle_scope(this);
  atomics_wait_callback_(
      event, v8::Utils::ToLocalShared(array_buffer), offset_in_bytes, value,
      timeout_in_ms,
      reinterpret_cast<v8::Isolate::AtomicsWaitWakeHandle*>(stop_handle),
      atomics_wait_callback_data_);
}

4453 4454
void Isolate::SetPromiseHook(PromiseHook hook) {
  promise_hook_ = hook;
4455
  PromiseHookStateUpdated();
4456
}
4457

4458 4459
void Isolate::RunPromiseHook(PromiseHookType type, Handle<JSPromise> promise,
                             Handle<Object> parent) {
4460
  RunPromiseHookForAsyncEventDelegate(type, promise);
4461 4462 4463
  if (promise_hook_ == nullptr) return;
  promise_hook_(type, v8::Utils::PromiseToLocal(promise),
                v8::Utils::ToLocal(parent));
4464 4465 4466 4467 4468 4469 4470 4471 4472 4473 4474 4475 4476 4477 4478 4479 4480 4481 4482 4483 4484 4485 4486 4487 4488 4489 4490 4491 4492 4493 4494 4495 4496 4497 4498 4499 4500 4501 4502 4503 4504 4505 4506 4507 4508 4509 4510 4511 4512 4513 4514 4515 4516 4517 4518 4519 4520 4521 4522 4523 4524 4525 4526 4527
}

// Forwards promise lifecycle events to the async event delegate (used by the
// inspector for async stack traces). For kInit, walks the JS stack to decide
// whether the promise was created directly by a then/catch/finally builtin
// called from user code, and only then assigns an async task id.
void Isolate::RunPromiseHookForAsyncEventDelegate(PromiseHookType type,
                                                  Handle<JSPromise> promise) {
  if (!async_event_delegate_) return;
  if (type == PromiseHookType::kResolve) return;

  if (type == PromiseHookType::kBefore) {
    if (!promise->async_task_id()) return;
    async_event_delegate_->AsyncEventOccurred(debug::kDebugWillHandle,
                                              promise->async_task_id(), false);
  } else if (type == PromiseHookType::kAfter) {
    if (!promise->async_task_id()) return;
    async_event_delegate_->AsyncEventOccurred(debug::kDebugDidHandle,
                                              promise->async_task_id(), false);
  } else {
    DCHECK(type == PromiseHookType::kInit);
    // Renamed from |type| to avoid shadowing the parameter of the same name.
    debug::DebugAsyncActionType action_type = debug::kDebugPromiseThen;
    bool last_frame_was_promise_builtin = false;
    JavaScriptFrameIterator it(this);
    while (!it.done()) {
      std::vector<Handle<SharedFunctionInfo>> infos;
      it.frame()->GetFunctions(&infos);
      // Walk inlined functions innermost-first.
      for (size_t i = 1; i <= infos.size(); ++i) {
        Handle<SharedFunctionInfo> info = infos[infos.size() - i];
        if (info->IsUserJavaScript()) {
          // We should not report PromiseThen and PromiseCatch which is called
          // indirectly, e.g. Promise.all calls Promise.then internally.
          if (last_frame_was_promise_builtin) {
            if (!promise->async_task_id()) {
              promise->set_async_task_id(++async_task_count_);
            }
            async_event_delegate_->AsyncEventOccurred(
                action_type, promise->async_task_id(),
                debug()->IsBlackboxed(info));
          }
          return;
        }
        last_frame_was_promise_builtin = false;
        if (info->HasBuiltinId()) {
          if (info->builtin_id() == Builtins::kPromisePrototypeThen) {
            action_type = debug::kDebugPromiseThen;
            last_frame_was_promise_builtin = true;
          } else if (info->builtin_id() == Builtins::kPromisePrototypeCatch) {
            action_type = debug::kDebugPromiseCatch;
            last_frame_was_promise_builtin = true;
          } else if (info->builtin_id() == Builtins::kPromisePrototypeFinally) {
            action_type = debug::kDebugPromiseFinally;
            last_frame_was_promise_builtin = true;
          }
        }
      }
      it.Advance();
    }
  }
}

// Reports an async-function state transition to the async event delegate,
// lazily assigning the promise an async task id.
void Isolate::OnAsyncFunctionStateChanged(Handle<JSPromise> promise,
                                          debug::DebugAsyncActionType event) {
  if (!async_event_delegate_) return;
  if (!promise->async_task_id()) {
    promise->set_async_task_id(++async_task_count_);
  }
  async_event_delegate_->AsyncEventOccurred(event, promise->async_task_id(),
                                            false);
}

4530 4531 4532 4533
void Isolate::SetPromiseRejectCallback(PromiseRejectCallback callback) {
  promise_reject_callback_ = callback;
}

4534
void Isolate::ReportPromiseReject(Handle<JSPromise> promise,
4535 4536
                                  Handle<Object> value,
                                  v8::PromiseRejectEvent event) {
4537
  if (promise_reject_callback_ == nullptr) return;
4538
  promise_reject_callback_(v8::PromiseRejectMessage(
4539
      v8::Utils::PromiseToLocal(promise), event, v8::Utils::ToLocal(value)));
4540 4541
}

4542
void Isolate::SetUseCounterCallback(v8::Isolate::UseCounterCallback callback) {
4543
  DCHECK(!use_counter_callback_);
4544 4545 4546 4547
  use_counter_callback_ = callback;
}

// Records a use-counter |feature| hit. Outside of GC the embedder callback is
// invoked directly; during GC the count is deferred via the heap.
void Isolate::CountUsage(v8::Isolate::UseCounterFeature feature) {
  // The counter callback may cause the embedder to call into V8, which is not
  // generally possible during GC.
  if (heap_.gc_state() == Heap::NOT_IN_GC) {
    if (use_counter_callback_) {
      HandleScope handle_scope(this);
      use_counter_callback_(reinterpret_cast<v8::Isolate*>(this), feature);
    }
  } else {
    heap_.IncrementDeferredCount(feature);
  }
}

4560 4561
// static
std::string Isolate::GetTurboCfgFileName(Isolate* isolate) {
4562
  if (FLAG_trace_turbo_cfg_file == nullptr) {
4563
    std::ostringstream os;
4564 4565 4566 4567 4568 4569 4570
    os << "turbo-" << base::OS::GetCurrentProcessId() << "-";
    if (isolate != nullptr) {
      os << isolate->id();
    } else {
      os << "any";
    }
    os << ".cfg";
4571
    return os.str();
4572
  } else {
4573
    return FLAG_trace_turbo_cfg_file;
4574
  }
4575 4576
}

4577 4578 4579 4580
// Heap::detached_contexts tracks detached contexts as pairs
// (number of GC since the context was detached, the context).
void Isolate::AddDetachedContext(Handle<Context> context) {
  HandleScope scope(this);
4581 4582 4583 4584 4585
  Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
  detached_contexts = WeakArrayList::AddToEnd(
      this, detached_contexts, MaybeObjectHandle(Smi::kZero, this));
  detached_contexts = WeakArrayList::AddToEnd(this, detached_contexts,
                                              MaybeObjectHandle::Weak(context));
4586 4587 4588 4589 4590
  heap()->set_detached_contexts(*detached_contexts);
}

void Isolate::CheckDetachedContextsAfterGC() {
  HandleScope scope(this);
4591
  Handle<WeakArrayList> detached_contexts = factory()->detached_contexts();
4592 4593 4594 4595
  int length = detached_contexts->length();
  if (length == 0) return;
  int new_length = 0;
  for (int i = 0; i < length; i += 2) {
4596
    int mark_sweeps = detached_contexts->Get(i).ToSmi().value();
4597
    MaybeObject context = detached_contexts->Get(i + 1);
4598 4599
    DCHECK(context->IsWeakOrCleared());
    if (!context->IsCleared()) {
4600 4601 4602
      detached_contexts->Set(
          new_length, MaybeObject::FromSmi(Smi::FromInt(mark_sweeps + 1)));
      detached_contexts->Set(new_length + 1, context);
4603 4604 4605
      new_length += 2;
    }
  }
4606 4607
  detached_contexts->set_length(new_length);
  while (new_length < length) {
4608
    detached_contexts->Set(new_length, MaybeObject::FromSmi(Smi::zero()));
4609 4610 4611
    ++new_length;
  }

4612 4613 4614 4615
  if (FLAG_trace_detached_contexts) {
    PrintF("%d detached contexts are collected out of %d\n",
           length - new_length, length);
    for (int i = 0; i < new_length; i += 2) {
4616
      int mark_sweeps = detached_contexts->Get(i).ToSmi().value();
4617
      MaybeObject context = detached_contexts->Get(i + 1);
4618
      DCHECK(context->IsWeakOrCleared());
4619
      if (mark_sweeps > 3) {
jfb's avatar
jfb committed
4620
        PrintF("detached context %p\n survived %d GCs (leak?)\n",
4621
               reinterpret_cast<void*>(context.ptr()), mark_sweeps);
4622
      }
4623 4624 4625 4626
    }
  }
}

4627
double Isolate::LoadStartTimeMs() {
4628
  base::MutexGuard guard(&rail_mutex_);
4629 4630 4631
  return load_start_time_ms_;
}

hpayer's avatar
hpayer committed
4632
void Isolate::SetRAILMode(RAILMode rail_mode) {
4633
  RAILMode old_rail_mode = rail_mode_.load();
4634
  if (old_rail_mode != PERFORMANCE_LOAD && rail_mode == PERFORMANCE_LOAD) {
4635
    base::MutexGuard guard(&rail_mutex_);
4636 4637
    load_start_time_ms_ = heap()->MonotonicallyIncreasingTimeInMs();
  }
4638
  rail_mode_.store(rail_mode);
4639 4640 4641 4642
  if (old_rail_mode == PERFORMANCE_LOAD && rail_mode != PERFORMANCE_LOAD) {
    heap()->incremental_marking()->incremental_marking_job()->ScheduleTask(
        heap());
  }
hpayer's avatar
hpayer committed
4643
  if (FLAG_trace_rail) {
4644
    PrintIsolate(this, "RAIL mode: %s\n", RAILModeName(rail_mode));
hpayer's avatar
hpayer committed
4645 4646
  }
}
4647

4648
void Isolate::IsolateInBackgroundNotification() {
4649
  is_isolate_in_background_ = true;
4650 4651 4652 4653
  heap()->ActivateMemoryReducerIfNeeded();
}

void Isolate::IsolateInForegroundNotification() {
4654
  is_isolate_in_background_ = false;
4655 4656
}

4657 4658 4659 4660 4661 4662 4663 4664 4665
void Isolate::PrintWithTimestamp(const char* format, ...) {
  base::OS::Print("[%d:%p] %8.0f ms: ", base::OS::GetCurrentProcessId(),
                  static_cast<void*>(this), time_millis_since_init());
  va_list arguments;
  va_start(arguments, format);
  base::OS::VPrint(format, arguments);
  va_end(arguments);
}

4666 4667 4668 4669
void Isolate::SetIdle(bool is_idle) {
  if (!is_profiling()) return;
  StateTag state = current_vm_state();
  DCHECK(state == EXTERNAL || state == IDLE);
4670
  if (js_entry_sp() != kNullAddress) return;
4671 4672 4673 4674 4675 4676 4677
  if (is_idle) {
    set_current_vm_state(IDLE);
  } else if (state == IDLE) {
    set_current_vm_state(EXTERNAL);
  }
}

#ifdef V8_INTL_SUPPORT
// Per-isolate cache of heavyweight ICU objects, keyed by ICUObjectCacheType.

// Returns the cached ICU object for |cache_type|, or nullptr if none.
icu::UMemory* Isolate::get_cached_icu_object(ICUObjectCacheType cache_type) {
  return icu_object_cache_[cache_type].get();
}

// Stores (or replaces) the cached ICU object for |cache_type|. |obj| is taken
// by value and moved into the cache, so the caller's copy is consumed.
void Isolate::set_icu_object_in_cache(ICUObjectCacheType cache_type,
                                      std::shared_ptr<icu::UMemory> obj) {
  icu_object_cache_[cache_type] = std::move(obj);
}

// Drops the cached ICU object for |cache_type|, if any.
void Isolate::clear_cached_icu_object(ICUObjectCacheType cache_type) {
  icu_object_cache_.erase(cache_type);
}
#endif  // V8_INTL_SUPPORT

4693
bool StackLimitCheck::JsHasOverflowed(uintptr_t gap) const {
4694 4695 4696 4697
  StackGuard* stack_guard = isolate_->stack_guard();
#ifdef USE_SIMULATOR
  // The simulator uses a separate JS stack.
  Address jssp_address = Simulator::current(isolate_)->get_sp();
4698
  uintptr_t jssp = static_cast<uintptr_t>(jssp_address);
4699
  if (jssp - gap < stack_guard->real_jslimit()) return true;
4700
#endif  // USE_SIMULATOR
4701
  return GetCurrentStackPosition() - gap < stack_guard->real_climit();
4702 4703
}

4704
SaveContext::SaveContext(Isolate* isolate) : isolate_(isolate) {
4705
  if (!isolate->context().is_null()) {
4706
    context_ = Handle<Context>(isolate->context(), isolate);
4707 4708 4709 4710 4711 4712
  }

  c_entry_fp_ = isolate->c_entry_fp(isolate->thread_local_top());
}

// Restores the context captured by the constructor (or clears it if none
// was set at construction time).
SaveContext::~SaveContext() {
  isolate_->set_context(context_.is_null() ? Context() : *context_);
}

4716 4717 4718 4719
bool SaveContext::IsBelowFrame(StandardFrame* frame) {
  return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
}

4720 4721 4722 4723 4724 4725
SaveAndSwitchContext::SaveAndSwitchContext(Isolate* isolate,
                                           Context new_context)
    : SaveContext(isolate) {
  isolate->set_context(new_context);
}

#ifdef DEBUG
// Debug-only scope that snapshots the current context; the (out-of-line)
// destructor asserts it has not changed.
AssertNoContextChange::AssertNoContextChange(Isolate* isolate)
    : isolate_(isolate), context_(isolate->context(), isolate) {}
#endif  // DEBUG

#undef TRACE_ISOLATE

}  // namespace internal
}  // namespace v8