// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_ISOLATE_H_
#define V8_ISOLATE_H_

8
#include <memory>
9
#include <queue>
10

11 12
#include "include/v8-debug.h"
#include "src/allocation.h"
13
#include "src/base/atomicops.h"
14
#include "src/builtins/builtins.h"
15
#include "src/contexts.h"
16
#include "src/date.h"
17 18
#include "src/execution.h"
#include "src/frames.h"
#include "src/futex-emulation.h"
20 21
#include "src/global-handles.h"
#include "src/handles.h"
22
#include "src/heap/heap.h"
23
#include "src/messages.h"
24
#include "src/regexp/regexp-stack.h"
25
#include "src/runtime/runtime.h"
26
#include "src/zone/zone.h"
27 28

namespace v8 {
29 30 31 32 33

namespace base {
class RandomNumberGenerator;
}

34 35
namespace internal {

36
class AccessCompilerData;
37
class AddressToIndexHashMap;
38
class AstStringConstants;
39
class BasicBlockProfiler;
40
class Bootstrapper;
41
class CancelableTaskManager;
42
class CallInterfaceDescriptorData;
43 44
class CodeAgingHelper;
class CodeEventDispatcher;
45 46
class CodeGenerator;
class CodeRange;
47
class CodeStubDescriptor;
48
class CodeTracer;
49
class CompilationCache;
50
class CompilerDispatcher;
51
class CompilationStatistics;
52 53 54 55 56
class ContextSlotCache;
class Counters;
class CpuFeatures;
class CpuProfiler;
class DeoptimizerData;
57
class DescriptorLookupCache;
58 59
class Deserializer;
class EmptyStatement;
60
class ExternalCallbackScope;
61 62 63
class ExternalReferenceTable;
class Factory;
class HandleScopeImplementer;
64
class HeapObjectToIndexHashMap;
65
class HeapProfiler;
66
class HStatistics;
67
class HTracer;
68
class InlineRuntimeFunctionsTable;
69
class InnerPointerToCodeCache;
70
class Logger;
class MaterializedObjectStore;
72
class OptimizingCompileDispatcher;
73
class RegExpStack;
74
class RuntimeProfiler;
75
class SaveContext;
76
class StatsTable;
77 78
class StringTracker;
class StubCache;
79
class SweeperThread;
80 81 82
class ThreadManager;
class ThreadState;
class ThreadVisitor;  // Defined in v8threads.h
class UnicodeCache;
84
template <StateTag Tag> class VMState;
85 86 87 88 89 90 91 92

// 'void function pointer', used to roundtrip the
// ExternalReference::ExternalReferenceRedirector since we can not include
// assembler.h, where it is defined, here.
typedef void* ExternalReferenceRedirectorPointer();


class Debug;
93
class PromiseOnStack;
94 95 96
class Redirection;
class Simulator;

97 98 99
namespace interpreter {
class Interpreter;
}
100

101
// If the isolate has a scheduled exception, promotes it to a pending
// exception and returns it (as the failure sentinel) from the enclosing
// function; otherwise falls through.
#define RETURN_FAILURE_IF_SCHEDULED_EXCEPTION(isolate)  \
  do {                                                  \
    Isolate* __isolate__ = (isolate);                   \
    if (!__isolate__->has_scheduled_exception()) break; \
    return __isolate__->PromoteScheduledException();    \
  } while (false)

109 110
// Macros for MaybeHandle.

// If the isolate has a scheduled exception, promotes it to a pending
// exception and returns |value| from the enclosing function; otherwise
// falls through.
#define RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, value) \
  do {                                                      \
    Isolate* __isolate__ = (isolate);                       \
    if (!__isolate__->has_scheduled_exception()) break;     \
    __isolate__->PromoteScheduledException();               \
    return value;                                           \
  } while (false)

120 121 122
// Returns an empty MaybeHandle<TYPE> from the enclosing function if the
// isolate has a scheduled exception (promoting it to pending on the way).
#define RETURN_EXCEPTION_IF_SCHEDULED_EXCEPTION(isolate, TYPE) \
  RETURN_VALUE_IF_SCHEDULED_EXCEPTION(isolate, MaybeHandle<TYPE>())

123 124 125 126 127 128 129 130 131 132 133
// Evaluates |call| (a MaybeHandle-returning expression).  On success,
// returns the dereferenced result object from the enclosing function; on
// failure, returns the heap's exception sentinel (a pending exception must
// already have been set on the isolate).
#define RETURN_RESULT_OR_FAILURE(isolate, call)     \
  do {                                              \
    Handle<Object> __result__;                      \
    Isolate* __isolate__ = (isolate);               \
    if (!(call).ToHandle(&__result__)) {            \
      DCHECK(__isolate__->has_pending_exception()); \
      return __isolate__->heap()->exception();      \
    }                                               \
    return *__result__;                             \
  } while (false)

134 135 136
// Assigns the result of |call| to |dst|.  If |call| failed, returns |value|
// from the enclosing function; a pending exception must already be set.
#define ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, value)  \
  do {                                                               \
    if (!(call).ToHandle(&dst)) {                                    \
      DCHECK((isolate)->has_pending_exception());                    \
      return value;                                                  \
    }                                                                \
  } while (false)

142 143 144 145 146 147
// Assigns the result of |call| to |dst|.  If |call| failed, returns the
// heap's exception sentinel from the enclosing function.
#define ASSIGN_RETURN_FAILURE_ON_EXCEPTION(isolate, dst, call)          \
  do {                                                                  \
    Isolate* __isolate__ = (isolate);                                   \
    ASSIGN_RETURN_ON_EXCEPTION_VALUE(__isolate__, dst, call,            \
                                     __isolate__->heap()->exception()); \
  } while (false)
148 149 150 151

// Assigns the result of |call| to |dst|.  If |call| failed, returns an
// empty MaybeHandle<T> from the enclosing function.
#define ASSIGN_RETURN_ON_EXCEPTION(isolate, dst, call, T)  \
  ASSIGN_RETURN_ON_EXCEPTION_VALUE(isolate, dst, call, MaybeHandle<T>())

152 153 154 155
// Throws a freshly created error object (|call| is a Factory method
// invocation, e.g. NewTypeError(...)) and returns the resulting empty
// MaybeHandle<T> from the enclosing function.
#define THROW_NEW_ERROR(isolate, call, T)                       \
  do {                                                          \
    Isolate* __isolate__ = (isolate);                           \
    return __isolate__->Throw<T>(__isolate__->factory()->call); \
  } while (false)

158 159 160 161
// Throws a freshly created error object (|call| is a Factory method
// invocation) and returns the failure sentinel produced by Throw() from
// the enclosing function.
#define THROW_NEW_ERROR_RETURN_FAILURE(isolate, call)         \
  do {                                                        \
    Isolate* __isolate__ = (isolate);                         \
    return __isolate__->Throw(*__isolate__->factory()->call); \
  } while (false)

164
// If |call| produced a null handle (i.e. failed), returns |value| from the
// enclosing function; a pending exception must already be set.
#define RETURN_ON_EXCEPTION_VALUE(isolate, call, value)            \
  do {                                                             \
    if ((call).is_null()) {                                        \
      DCHECK((isolate)->has_pending_exception());                  \
      return value;                                                \
    }                                                              \
  } while (false)

172 173 174 175 176 177
// If |call| produced a null handle (i.e. failed), returns the heap's
// exception sentinel from the enclosing function; a pending exception must
// already be set.
//
// Fix: the do/while must NOT carry its own trailing semicolon.  The
// do { ... } while (false) idiom exists so the caller supplies the ';';
// with the extra semicolon the expansion ends in an empty statement,
// which breaks uses like "if (cond) RETURN_FAILURE_ON_EXCEPTION(...); else"
// and is inconsistent with every sibling macro in this file.
#define RETURN_FAILURE_ON_EXCEPTION(isolate, call)               \
  do {                                                           \
    Isolate* __isolate__ = (isolate);                            \
    RETURN_ON_EXCEPTION_VALUE(__isolate__, call,                 \
                              __isolate__->heap()->exception()); \
  } while (false)
178 179

// If |call| produced a null handle (i.e. failed), returns an empty
// MaybeHandle<T> from the enclosing function.
#define RETURN_ON_EXCEPTION(isolate, call, T)  \
  RETURN_ON_EXCEPTION_VALUE(isolate, call, MaybeHandle<T>())
181 182


183 184 185
// X-macro enumerating the per-isolate addresses exposed through
// Isolate::AddressId / Isolate::get_address_from_id().  |C| is invoked as
// C(CamelCaseName, hacker_style_name) for each entry.
#define FOR_EACH_ISOLATE_ADDRESS_NAME(C)                \
  C(Handler, handler)                                   \
  C(CEntryFP, c_entry_fp)                               \
  C(CFunction, c_function)                              \
  C(Context, context)                                   \
  C(PendingException, pending_exception)                \
  C(PendingHandlerContext, pending_handler_context)     \
  C(PendingHandlerCode, pending_handler_code)           \
  C(PendingHandlerOffset, pending_handler_offset)       \
  C(PendingHandlerFP, pending_handler_fp)               \
  C(PendingHandlerSP, pending_handler_sp)               \
  C(ExternalCaughtException, external_caught_exception) \
  C(JSEntrySP, js_entry_sp)
196

197 198 199 200 201 202 203 204 205 206 207 208 209 210
// Runs |body| as a for-loop, but opens a fresh HandleScope every 1024
// iterations so handles created inside the body do not accumulate across
// the entire loop.
#define FOR_WITH_HANDLE_SCOPE(isolate, loop_var_type, init, loop_var,   \
                              limit_check, increment, body)             \
  do {                                                                  \
    loop_var_type init;                                                 \
    loop_var_type fwhs_scope_limit = loop_var;                          \
    Isolate* fwhs_isolate = isolate;                                    \
    while (limit_check) {                                               \
      fwhs_scope_limit += 1024;                                         \
      HandleScope loop_scope(fwhs_isolate);                             \
      for (; limit_check && loop_var < fwhs_scope_limit; increment) {   \
        body                                                            \
      }                                                                 \
    }                                                                   \
  } while (false)
211

212 213 214 215
// Platform-independent, reliable thread identifier.  The underlying id is
// stored with base::NoBarrier_* atomic accesses, so a ThreadId may be read
// and assigned from different threads.
class ThreadId {
 public:
  // Creates an invalid ThreadId.
  ThreadId() { base::NoBarrier_Store(&id_, kInvalidId); }

  ThreadId& operator=(const ThreadId& other) {
    base::NoBarrier_Store(&id_, base::NoBarrier_Load(&other.id_));
    return *this;
  }

  // Returns ThreadId for current thread.
  static ThreadId Current() { return ThreadId(GetCurrentThreadId()); }

  // Returns invalid ThreadId (guaranteed not to be equal to any thread).
  static ThreadId Invalid() { return ThreadId(kInvalidId); }

  // Compares ThreadIds for equality.
  INLINE(bool Equals(const ThreadId& other) const) {
    return base::NoBarrier_Load(&id_) == base::NoBarrier_Load(&other.id_);
  }

  // Checks whether this ThreadId refers to any thread.
  INLINE(bool IsValid() const) {
    return base::NoBarrier_Load(&id_) != kInvalidId;
  }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::GetCurrentThreadId).
  int ToInteger() const { return static_cast<int>(base::NoBarrier_Load(&id_)); }

  // Converts ThreadId to an integer representation
  // (required for public API: V8::V8::TerminateExecution).
  static ThreadId FromInteger(int id) { return ThreadId(id); }

 private:
  // Sentinel id meaning "no thread".
  static const int kInvalidId = -1;

  explicit ThreadId(int id) { base::NoBarrier_Store(&id_, id); }

  static int AllocateThreadId();

  V8_EXPORT_PRIVATE static int GetCurrentThreadId();

  base::Atomic32 id_;

  // Highest id ever handed out; used by AllocateThreadId.
  static base::Atomic32 highest_thread_id_;

  friend class Isolate;
};


264 265 266 267 268
// Generates a trivial inline getter/setter pair for the |name|_ member.
#define FIELD_ACCESSOR(type, name)                \
  inline type name() const { return name##_; }    \
  inline void set_##name(type v) { name##_ = v; }


269 270
// Per-thread state belonging to an isolate: the current context, pending
// and scheduled exceptions, C entry frame pointers, and the TryCatch chain.
// An Isolate embeds one ThreadLocalTop and archives/restores it when
// threads enter and leave the isolate.
class ThreadLocalTop BASE_EMBEDDED {
 public:
  // Does early low-level initialization that does not depend on the
  // isolate being present.
  ThreadLocalTop();

  // Initialize the thread data.
  void Initialize();

  // Get the top C++ try catch handler or NULL if none are registered.
  //
  // This method is not guaranteed to return an address that can be
  // used for comparison with addresses into the JS stack.  If such an
  // address is needed, use try_catch_handler_address.
  FIELD_ACCESSOR(v8::TryCatch*, try_catch_handler)

  // Get the address of the top C++ try catch handler or NULL if
  // none are registered.
  //
  // This method always returns an address that can be compared to
  // pointers into the JavaScript stack.  When running on actual
  // hardware, try_catch_handler_address and TryCatchHandler return
  // the same pointer.  When running on a simulator with a separate JS
  // stack, try_catch_handler_address returns a JS stack address that
  // corresponds to the place on the JS stack where the C++ handler
  // would have been if the stack were not separate.
  Address try_catch_handler_address() {
    return reinterpret_cast<Address>(
        v8::TryCatch::JSStackComparableAddress(try_catch_handler()));
  }

  void Free();

  Isolate* isolate_;
  // The context where the current execution method is created and for variable
  // lookups.
  Context* context_;
  ThreadId thread_id_;
  Object* pending_exception_;

  // Communication channel between Isolate::FindHandler and the CEntryStub.
  Context* pending_handler_context_;
  Code* pending_handler_code_;
  intptr_t pending_handler_offset_;
  Address pending_handler_fp_;
  Address pending_handler_sp_;

  // Communication channel between Isolate::Throw and message consumers.
  bool rethrowing_message_;
  Object* pending_message_obj_;

  // Use a separate value for scheduled exceptions to preserve the
  // invariants that hold about pending_exception.  We may want to
  // unify them later.
  Object* scheduled_exception_;
  bool external_caught_exception_;
  SaveContext* save_context_;

  // Stack.
  Address c_entry_fp_;  // the frame pointer of the top c entry frame
  Address handler_;     // try-blocks are chained through the stack
  Address c_function_;  // C function that was called at c entry.

  // Throwing an exception may cause a Promise rejection.  For this purpose
  // we keep track of a stack of nested promises and the corresponding
  // try-catch handlers.
  PromiseOnStack* promise_on_stack_;

// NOTE(review): uses "#ifdef USE_SIMULATOR" while other parts of this file
// use "#if USE_SIMULATOR" — confirm the macro is consistently defined/undefined
// rather than defined to 0.
#ifdef USE_SIMULATOR
  Simulator* simulator_;
#endif

  Address js_entry_sp_;  // the stack pointer of the bottom JS entry frame
  // The external callback we're currently in, if any.
  ExternalCallbackScope* external_callback_scope_;
  StateTag current_vm_state_;

  // Call back function to report unsafe JS accesses.
  v8::FailedAccessCheckCallback failed_access_check_callback_;

 private:
  void InitializeInternal();

  v8::TryCatch* try_catch_handler_;
};


356
#if USE_SIMULATOR
357

358 359 360
#define ISOLATE_INIT_SIMULATOR_LIST(V)                    \
  V(bool, simulator_initialized, false)                   \
  V(base::CustomMatcherHashMap*, simulator_i_cache, NULL) \
361 362 363 364 365 366 367 368
  V(Redirection*, simulator_redirection, NULL)
#else

#define ISOLATE_INIT_SIMULATOR_LIST(V)

#endif


369 370
#ifdef DEBUG

371 372 373 374
#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)               \
  V(CommentStatistic, paged_space_comments_statistics, \
    CommentStatistic::kMaxComments + 1)                \
  V(int, code_kind_statistics, AbstractCode::NUMBER_OF_KINDS)
375 376 377 378 379 380 381 382
#else

#define ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

#endif

// Per-isolate array-valued state; |V| is invoked as V(type, name, length).
#define ISOLATE_INIT_ARRAY_LIST(V)                                             \
  /* SerializerDeserializer state. */                                          \
  V(int32_t, jsregexp_static_offsets_vector, kJSRegexpStaticOffsetsVectorSize) \
  V(int, bad_char_shift_table, kUC16AlphabetSize)                              \
  V(int, good_suffix_shift_table, (kBMMaxShift + 1))                           \
  V(int, suffix_table, (kBMMaxShift + 1))                                      \
  ISOLATE_INIT_DEBUG_ARRAY_LIST(V)

389
typedef List<HeapObject*> DebugObjectCache;
390

391 392 393
// Per-isolate scalar state; |V| is invoked as V(type, name, initial_value).
// Getters/setters are generated by GLOBAL_ACCESSOR in class Isolate.
#define ISOLATE_INIT_LIST(V)                                                  \
  /* Assembler state. */                                                      \
  V(FatalErrorCallback, exception_behavior, nullptr)                          \
  V(OOMErrorCallback, oom_behavior, nullptr)                                  \
  V(LogEventCallback, event_logger, nullptr)                                  \
  V(AllowCodeGenerationFromStringsCallback, allow_code_gen_callback, nullptr) \
  V(ExternalReferenceRedirectorPointer*, external_reference_redirector,       \
    nullptr)                                                                  \
  /* State for Relocatable. */                                                \
  V(Relocatable*, relocatable_top, nullptr)                                   \
  V(DebugObjectCache*, string_stream_debug_object_cache, nullptr)             \
  V(Object*, string_stream_current_security_token, nullptr)                   \
  V(ExternalReferenceTable*, external_reference_table, nullptr)               \
  V(intptr_t*, api_external_references, nullptr)                              \
  V(AddressToIndexHashMap*, external_reference_map, nullptr)                  \
  V(HeapObjectToIndexHashMap*, root_index_map, nullptr)                       \
  V(int, pending_microtask_count, 0)                                          \
  V(HStatistics*, hstatistics, nullptr)                                       \
  V(CompilationStatistics*, turbo_statistics, nullptr)                        \
  V(HTracer*, htracer, nullptr)                                               \
  V(CodeTracer*, code_tracer, nullptr)                                        \
  V(uint32_t, per_isolate_assert_data, 0xFFFFFFFFu)                           \
  V(PromiseRejectCallback, promise_reject_callback, nullptr)                  \
  V(const v8::StartupData*, snapshot_blob, nullptr)                           \
  V(int, code_and_metadata_size, 0)                                           \
  V(int, bytecode_and_metadata_size, 0)                                       \
  /* true if being profiled. Causes collection of extra compile info. */      \
  V(bool, is_profiling, false)                                                \
  /* true if a trace is being formatted through Error.prepareStackTrace. */   \
  V(bool, formatting_stack_trace, false)                                      \
  /* Perform side effect checks on function call and API callbacks. */        \
  V(bool, needs_side_effect_check, false)                                     \
  ISOLATE_INIT_SIMULATOR_LIST(V)
424

425 426 427 428
// Generates an inline getter/setter pair that forwards to the
// corresponding ThreadLocalTop field of this isolate.
#define THREAD_LOCAL_TOP_ACCESSOR(type, name)                       \
  inline type name() const { return thread_local_top_.name##_; }    \
  inline void set_##name(type v) { thread_local_top_.name##_ = v; }

429 430 431
// Generates an accessor returning the address of the corresponding
// ThreadLocalTop field of this isolate.
#define THREAD_LOCAL_TOP_ADDRESS(field_type, field) \
  field_type* field##_address() { return &thread_local_top_.field##_; }

432

433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450
class Isolate {
  // These forward declarations are required to make the friend declarations in
  // PerIsolateThreadData work on some older versions of gcc.
  class ThreadDataTable;
  class EntryStackItem;
 public:
  ~Isolate();

  // A thread has a PerIsolateThreadData instance for each isolate that it has
  // entered. That instance is allocated when the isolate is initially entered
  // and reused on subsequent entries.
  class PerIsolateThreadData {
   public:
    PerIsolateThreadData(Isolate* isolate, ThreadId thread_id)
        : isolate_(isolate),
          thread_id_(thread_id),
          stack_limit_(0),
          thread_state_(NULL),
451
#if USE_SIMULATOR
452 453 454 455
          simulator_(NULL),
#endif
          next_(NULL),
          prev_(NULL) { }
456
    ~PerIsolateThreadData();
457 458
    Isolate* isolate() const { return isolate_; }
    ThreadId thread_id() const { return thread_id_; }
459 460 461

    FIELD_ACCESSOR(uintptr_t, stack_limit)
    FIELD_ACCESSOR(ThreadState*, thread_state)
462

463
#if USE_SIMULATOR
464
    FIELD_ACCESSOR(Simulator*, simulator)
465 466 467
#endif

    bool Matches(Isolate* isolate, ThreadId thread_id) const {
468
      return isolate_ == isolate && thread_id_.Equals(thread_id);
469 470 471 472 473 474 475 476
    }

   private:
    Isolate* isolate_;
    ThreadId thread_id_;
    uintptr_t stack_limit_;
    ThreadState* thread_state_;

477
#if USE_SIMULATOR
478 479 480 481 482 483 484 485 486 487 488 489 490 491 492
    Simulator* simulator_;
#endif

    PerIsolateThreadData* next_;
    PerIsolateThreadData* prev_;

    friend class Isolate;
    friend class ThreadDataTable;
    friend class EntryStackItem;

    DISALLOW_COPY_AND_ASSIGN(PerIsolateThreadData);
  };


  enum AddressId {
493 494
#define DECLARE_ENUM(CamelName, hacker_name) k##CamelName##Address,
    FOR_EACH_ISOLATE_ADDRESS_NAME(DECLARE_ENUM)
495
#undef DECLARE_ENUM
496
    kIsolateAddressCount
497 498
  };

499 500
  static void InitializeOncePerProcess();

501 502 503 504
  // Returns the PerIsolateThreadData for the current thread (or NULL if one is
  // not currently set).
  static PerIsolateThreadData* CurrentPerIsolateThreadData() {
    return reinterpret_cast<PerIsolateThreadData*>(
505
        base::Thread::GetThreadLocal(per_isolate_thread_data_key_));
506 507 508 509
  }

  // Returns the isolate inside which the current thread is running.
  INLINE(static Isolate* Current()) {
510
    DCHECK(base::NoBarrier_Load(&isolate_key_created_) == 1);
vitalyr@chromium.org's avatar
vitalyr@chromium.org committed
511
    Isolate* isolate = reinterpret_cast<Isolate*>(
512
        base::Thread::GetExistingThreadLocal(isolate_key_));
513
    DCHECK(isolate != NULL);
514 515 516
    return isolate;
  }

517 518 519 520 521 522 523
  // Usually called by Init(), but can be called early e.g. to allow
  // testing components that require logging but not the whole
  // isolate.
  //
  // Safe to call more than once.
  void InitializeLoggingAndCounters();

524 525 526 527 528 529 530 531 532 533
  bool Init(Deserializer* des);

  // True if at least one thread Enter'ed this isolate.
  bool IsInUse() { return entry_stack_ != NULL; }

  // Destroys the non-default isolates.
  // Sets default isolate into "has_been_disposed" state rather then destroying,
  // for legacy API reasons.
  void TearDown();

534 535
  static void GlobalTearDown();

536 537
  void ClearSerializerData();

538 539 540 541
  // Find the PerThread for this particular (isolate, thread) combination
  // If one does not yet exist, return null.
  PerIsolateThreadData* FindPerThreadDataForThisThread();

542 543 544 545
  // Find the PerThread for given (isolate, thread) combination
  // If one does not yet exist, return null.
  PerIsolateThreadData* FindPerThreadDataForThread(ThreadId thread_id);

546 547 548 549
  // Discard the PerThread for this particular (isolate, thread) combination
  // If one does not yet exist, no-op.
  void DiscardPerThreadDataForThisThread();

550 551 552
  // Returns the key used to store the pointer to the current isolate.
  // Used internally for V8 threads that do not execute JavaScript but still
  // are part of the domain of an isolate (like the context switcher).
553
  static base::Thread::LocalStorageKey isolate_key() {
554 555
    return isolate_key_;
  }
556 557

  // Returns the key used to store process-wide thread IDs.
558
  static base::Thread::LocalStorageKey thread_id_key() {
559 560
    return thread_id_key_;
  }
561

562
  static base::Thread::LocalStorageKey per_isolate_thread_data_key();
563 564

  // Mutex for serializing access to break control structures.
565
  base::RecursiveMutex* break_access() { return &break_access_; }
566 567 568 569 570

  Address get_address_from_id(AddressId id);

  // Access to top context (where the current function object was created).
  Context* context() { return thread_local_top_.context_; }
571
  inline void set_context(Context* context);
572 573
  Context** context_address() { return &thread_local_top_.context_; }

574
  THREAD_LOCAL_TOP_ACCESSOR(SaveContext*, save_context)
575 576

  // Access to current thread id.
577
  THREAD_LOCAL_TOP_ACCESSOR(ThreadId, thread_id)
578 579

  // Interface to pending exception.
580 581 582
  inline Object* pending_exception();
  inline void set_pending_exception(Object* exception_obj);
  inline void clear_pending_exception();
583

584
  THREAD_LOCAL_TOP_ADDRESS(Object*, pending_exception)
585

586
  inline bool has_pending_exception();
587

588 589 590 591 592 593
  THREAD_LOCAL_TOP_ADDRESS(Context*, pending_handler_context)
  THREAD_LOCAL_TOP_ADDRESS(Code*, pending_handler_code)
  THREAD_LOCAL_TOP_ADDRESS(intptr_t, pending_handler_offset)
  THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_fp)
  THREAD_LOCAL_TOP_ADDRESS(Address, pending_handler_sp)

594 595
  THREAD_LOCAL_TOP_ACCESSOR(bool, external_caught_exception)

596
  v8::TryCatch* try_catch_handler() {
597
    return thread_local_top_.try_catch_handler();
598 599 600 601
  }
  bool* external_caught_exception_address() {
    return &thread_local_top_.external_caught_exception_;
  }
602

603
  THREAD_LOCAL_TOP_ADDRESS(Object*, scheduled_exception)
604

605
  inline void clear_pending_message();
606 607 608 609
  Address pending_message_obj_address() {
    return reinterpret_cast<Address>(&thread_local_top_.pending_message_obj_);
  }

610 611 612
  inline Object* scheduled_exception();
  inline bool has_scheduled_exception();
  inline void clear_scheduled_exception();
613

614 615
  bool IsJavaScriptHandlerOnTop(Object* exception);
  bool IsExternalHandlerOnTop(Object* exception);
616

617
  inline bool is_catchable_by_javascript(Object* exception);
618
  inline bool is_catchable_by_wasm(Object* exception);
619 620 621 622 623 624

  // JS execution stack (see frames.h).
  static Address c_entry_fp(ThreadLocalTop* thread) {
    return thread->c_entry_fp_;
  }
  static Address handler(ThreadLocalTop* thread) { return thread->handler_; }
625
  Address c_function() { return thread_local_top_.c_function_; }
626 627 628 629 630

  inline Address* c_entry_fp_address() {
    return &thread_local_top_.c_entry_fp_;
  }
  inline Address* handler_address() { return &thread_local_top_.handler_; }
631 632 633
  inline Address* c_function_address() {
    return &thread_local_top_.c_function_;
  }
634

635 636 637
  // Bottom JS entry.
  Address js_entry_sp() {
    return thread_local_top_.js_entry_sp_;
638 639 640 641 642 643
  }
  inline Address* js_entry_sp_address() {
    return &thread_local_top_.js_entry_sp_;
  }

  // Returns the global object of the current context. It could be
644
  // a builtin object, or a JS global object.
645
  inline Handle<JSGlobalObject> global_object();
646 647

  // Returns the global proxy object of the current context.
648
  inline Handle<JSObject> global_proxy();
649 650 651 652 653 654 655 656 657 658

  static int ArchiveSpacePerThread() { return sizeof(ThreadLocalTop); }
  void FreeThreadResources() { thread_local_top_.Free(); }

  // This method is called by the api after operations that may throw
  // exceptions.  If an exception was thrown and not handled by an external
  // handler the exception is scheduled to be rethrown when we return to running
  // JavaScript code.  If an exception is scheduled true is returned.
  bool OptionalRescheduleException(bool is_bottom_call);

659
  // Push and pop a promise and the current try-catch handler.
660
  void PushPromise(Handle<JSObject> promise);
661
  void PopPromise();
662 663 664

  // Return the relevant Promise that a throw/rejection pertains to, based
  // on the contents of the Promise stack
665 666
  Handle<Object> GetPromiseOnStackOnThrow();

667 668 669
  // Heuristically guess whether a Promise is handled by user catch handler
  bool PromiseHasUserDefinedRejectHandler(Handle<Object> promise);

670 671
  class ExceptionScope {
   public:
672 673
    // Scope currently can only be used for regular exceptions,
    // not termination exception.
674 675
    inline explicit ExceptionScope(Isolate* isolate);
    inline ~ExceptionScope();
676 677 678 679 680 681

   private:
    Isolate* isolate_;
    Handle<Object> pending_exception_;
  };

682 683 684 685 686
  void SetCaptureStackTraceForUncaughtExceptions(
      bool capture,
      int frame_limit,
      StackTrace::StackTraceOptions options);

687 688 689
  void SetAbortOnUncaughtExceptionCallback(
      v8::Isolate::AbortOnUncaughtExceptionCallback callback);

690
  enum PrintStackMode { kPrintStackConcise, kPrintStackVerbose };
691
  void PrintCurrentStackTrace(FILE* out);
692 693 694
  void PrintStack(StringStream* accumulator,
                  PrintStackMode mode = kPrintStackVerbose);
  void PrintStack(FILE* out, PrintStackMode mode = kPrintStackVerbose);
695
  Handle<String> StackTraceString();
696 697
  NO_INLINE(void PushStackTraceAndDie(unsigned int magic, void* ptr1,
                                      void* ptr2, unsigned int magic2));
698 699 700
  Handle<JSArray> CaptureCurrentStackTrace(
      int frame_limit,
      StackTrace::StackTraceOptions options);
701
  Handle<Object> CaptureSimpleStackTrace(Handle<JSReceiver> error_object,
702
                                         FrameSkipMode mode,
703
                                         Handle<Object> caller);
704 705 706
  MaybeHandle<JSReceiver> CaptureAndSetDetailedStackTrace(
      Handle<JSReceiver> error_object);
  MaybeHandle<JSReceiver> CaptureAndSetSimpleStackTrace(
707 708
      Handle<JSReceiver> error_object, FrameSkipMode mode,
      Handle<Object> caller);
709
  Handle<JSArray> GetDetailedStackTrace(Handle<JSObject> error_object);
710

711
  // Returns if the given context may access the given global object. If
712 713
  // the result is false, the pending exception is guaranteed to be
  // set.
714
  bool MayAccess(Handle<Context> accessing_context, Handle<JSObject> receiver);
715

716
  void SetFailedAccessCheckCallback(v8::FailedAccessCheckCallback callback);
717
  void ReportFailedAccessCheck(Handle<JSObject> receiver);
718 719

  // Exception throwing support. The caller should use the result
jwolfe's avatar
jwolfe committed
720
  // of Throw() as its return value.
721
  Object* Throw(Object* exception, MessageLocation* location = NULL);
722
  Object* ThrowIllegalOperation();
723 724 725 726 727 728 729 730

  template <typename T>
  MUST_USE_RESULT MaybeHandle<T> Throw(Handle<Object> exception,
                                       MessageLocation* location = NULL) {
    Throw(*exception, location);
    return MaybeHandle<T>();
  }

731 732
  // Re-throw an exception.  This involves no error reporting since error
  // reporting was handled when the exception was thrown originally.
733
  Object* ReThrow(Object* exception);
734 735 736

  // Find the correct handler for the current pending exception. This also
  // clears and returns the current pending exception.
737
  Object* UnwindAndFindHandler();
738

739
  // Tries to predict whether an exception will be caught. Note that this can
740
  // only produce an estimate, because it is undecidable whether a finally
741
  // clause will consume or re-throw an exception.
742 743 744 745
  enum CatchType {
    NOT_CAUGHT,
    CAUGHT_BY_JAVASCRIPT,
    CAUGHT_BY_EXTERNAL,
746 747 748
    CAUGHT_BY_DESUGARING,
    CAUGHT_BY_PROMISE,
    CAUGHT_BY_ASYNC_AWAIT
749
  };
750
  CatchType PredictExceptionCatcher();
751

752
  void ScheduleThrow(Object* exception);
753 754 755
  // Re-set pending message, script and positions reported to the TryCatch
  // back to the TLS for re-use when rethrowing.
  void RestorePendingMessageFromTryCatch(v8::TryCatch* handler);
756 757
  // Un-schedule an exception that was caught by a TryCatch handler.
  void CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler);
758
  void ReportPendingMessages();
759 760
  // Return pending location if any or unfilled structure.
  MessageLocation GetMessageLocation();
761 762

  // Promote a scheduled exception to pending. Asserts has_scheduled_exception.
763
  Object* PromoteScheduledException();
764 765

  // Attempts to compute the current source location, storing the
766 767 768
  // result in the target out parameter. The source location is attached to a
  // Message object as the location which should be shown to the user. It's
  // typically the top-most meaningful location on the stack.
769
  bool ComputeLocation(MessageLocation* target);
770 771
  bool ComputeLocationFromException(MessageLocation* target,
                                    Handle<Object> exception);
772
  bool ComputeLocationFromStackTrace(MessageLocation* target,
773 774 775 776
                                     Handle<Object> exception);

  Handle<JSMessageObject> CreateMessage(Handle<Object> exception,
                                        MessageLocation* location);
777 778

  // Out of resource exception helpers.
779 780
  Object* StackOverflow();
  Object* TerminateExecution();
781
  void CancelTerminateExecution();
782

783 784
  void RequestInterrupt(InterruptCallback callback, void* data);
  void InvokeApiInterruptCallbacks();
785

786 787 788 789 790 791
  // Administration
  void Iterate(ObjectVisitor* v);
  void Iterate(ObjectVisitor* v, ThreadLocalTop* t);
  char* Iterate(ObjectVisitor* v, char* t);
  void IterateThread(ThreadVisitor* v, char* t);

792
  // Returns the current native context.
793 794
  inline Handle<Context> native_context();
  inline Context* raw_native_context();
795

796 797 798
  // Returns the native context of the calling JavaScript code.  That
  // is, the native context of the top-most JavaScript frame.
  Handle<Context> GetCallingNativeContext();
799 800 801 802 803 804 805 806 807 808 809 810 811

  void RegisterTryCatchHandler(v8::TryCatch* that);
  void UnregisterTryCatchHandler(v8::TryCatch* that);

  char* ArchiveThread(char* to);
  char* RestoreThread(char* from);

  static const int kUC16AlphabetSize = 256;  // See StringSearchBase.
  static const int kBMMaxShift = 250;        // See StringSearchBase.

  // Accessors.
#define GLOBAL_ACCESSOR(type, name, initialvalue)                       \
  inline type name() const {                                            \
812
    DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
813 814 815
    return name##_;                                                     \
  }                                                                     \
  inline void set_##name(type value) {                                  \
816
    DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
817 818 819 820 821 822 823
    name##_ = value;                                                    \
  }
  ISOLATE_INIT_LIST(GLOBAL_ACCESSOR)
#undef GLOBAL_ACCESSOR

#define GLOBAL_ARRAY_ACCESSOR(type, name, length)                       \
  inline type* name() {                                                 \
824
    DCHECK(OFFSET_OF(Isolate, name##_) == name##_debug_offset_);        \
825 826 827 828 829
    return &(name##_)[0];                                               \
  }
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_ACCESSOR)
#undef GLOBAL_ARRAY_ACCESSOR

830 831 832
#define NATIVE_CONTEXT_FIELD_ACCESSOR(index, type, name) \
  inline Handle<type> name();                            \
  inline bool is_##name(type* value);
833 834
  NATIVE_CONTEXT_FIELDS(NATIVE_CONTEXT_FIELD_ACCESSOR)
#undef NATIVE_CONTEXT_FIELD_ACCESSOR
  Bootstrapper* bootstrapper() { return bootstrapper_; }
837 838 839
  Counters* counters() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
840
    DCHECK(counters_ != NULL);
841 842
    return counters_;
  }
843 844
  RuntimeProfiler* runtime_profiler() { return runtime_profiler_; }
  CompilationCache* compilation_cache() { return compilation_cache_; }
845 846 847
  Logger* logger() {
    // Call InitializeLoggingAndCounters() if logging is needed before
    // the isolate is fully initialized.
848
    DCHECK(logger_ != NULL);
849 850
    return logger_;
  }
851 852
  StackGuard* stack_guard() { return &stack_guard_; }
  Heap* heap() { return &heap_; }
853
  StatsTable* stats_table();
854 855
  StubCache* load_stub_cache() { return load_stub_cache_; }
  StubCache* store_stub_cache() { return store_stub_cache_; }
856
  CodeAgingHelper* code_aging_helper() { return code_aging_helper_; }
857
  DeoptimizerData* deoptimizer_data() { return deoptimizer_data_; }
858 859 860 861
  bool deoptimizer_lazy_throw() const { return deoptimizer_lazy_throw_; }
  void set_deoptimizer_lazy_throw(bool value) {
    deoptimizer_lazy_throw_ = value;
  }
862
  ThreadLocalTop* thread_local_top() { return &thread_local_top_; }
jarin@chromium.org's avatar
jarin@chromium.org committed
863 864 865
  MaterializedObjectStore* materialized_object_store() {
    return materialized_object_store_;
  }
866

867 868 869 870
  ContextSlotCache* context_slot_cache() {
    return context_slot_cache_;
  }

871 872 873 874
  DescriptorLookupCache* descriptor_lookup_cache() {
    return descriptor_lookup_cache_;
  }

875 876
  HandleScopeData* handle_scope_data() { return &handle_scope_data_; }

877
  HandleScopeImplementer* handle_scope_implementer() {
878
    DCHECK(handle_scope_implementer_);
879 880 881
    return handle_scope_implementer_;
  }

882 883
  UnicodeCache* unicode_cache() {
    return unicode_cache_;
884 885
  }

886 887 888
  InnerPointerToCodeCache* inner_pointer_to_code_cache() {
    return inner_pointer_to_code_cache_;
  }
889 890 891

  GlobalHandles* global_handles() { return global_handles_; }

892 893
  EternalHandles* eternal_handles() { return eternal_handles_; }

894 895 896 897 898 899 900 901 902 903 904 905 906 907 908 909 910 911 912 913 914
  ThreadManager* thread_manager() { return thread_manager_; }

  unibrow::Mapping<unibrow::Ecma262UnCanonicalize>* jsregexp_uncanonicalize() {
    return &jsregexp_uncanonicalize_;
  }

  unibrow::Mapping<unibrow::CanonicalizationRange>* jsregexp_canonrange() {
    return &jsregexp_canonrange_;
  }

  RuntimeState* runtime_state() { return &runtime_state_; }

  Builtins* builtins() { return &builtins_; }

  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      regexp_macro_assembler_canonicalize() {
    return &regexp_macro_assembler_canonicalize_;
  }

  RegExpStack* regexp_stack() { return regexp_stack_; }

915 916
  List<int>* regexp_indices() { return &regexp_indices_; }

917 918
  unibrow::Mapping<unibrow::Ecma262Canonicalize>*
      interp_canonicalize_mapping() {
919
    return &regexp_macro_assembler_canonicalize_;
920 921
  }

922
  Debug* debug() { return debug_; }
923

924
  bool* is_profiling_address() { return &is_profiling_; }
925 926 927
  CodeEventDispatcher* code_event_dispatcher() const {
    return code_event_dispatcher_.get();
  }
928
  HeapProfiler* heap_profiler() const { return heap_profiler_; }
929

930 931 932 933 934 935 936 937 938 939
#ifdef DEBUG
  HistogramInfo* heap_histograms() { return heap_histograms_; }

  JSObject::SpillInformation* js_spill_information() {
    return &js_spill_information_;
  }
#endif

  Factory* factory() { return reinterpret_cast<Factory*>(this); }

940
  static const int kJSRegexpStaticOffsetsVectorSize = 128;
941

942
  THREAD_LOCAL_TOP_ACCESSOR(ExternalCallbackScope*, external_callback_scope)
943

944
  THREAD_LOCAL_TOP_ACCESSOR(StateTag, current_vm_state)
945

946
  void SetData(uint32_t slot, void* data) {
947
    DCHECK(slot < Internals::kNumIsolateDataSlots);
948 949 950
    embedder_data_[slot] = data;
  }
  void* GetData(uint32_t slot) {
951
    DCHECK(slot < Internals::kNumIsolateDataSlots);
952 953
    return embedder_data_[slot];
  }
954

955
  bool serializer_enabled() const { return serializer_enabled_; }
956 957 958
  bool snapshot_available() const {
    return snapshot_blob_ != NULL && snapshot_blob_->raw_size != 0;
  }
959

960 961 962
  bool IsDead() { return has_fatal_error_; }
  void SignalFatalError() { has_fatal_error_ = true; }

963
  bool use_crankshaft() const;
964

965 966
  bool initialized_from_snapshot() { return initialized_from_snapshot_; }

967 968
  bool NeedsSourcePositionsForProfiling() const;

969
  double time_millis_since_init() {
970
    return heap_.MonotonicallyIncreasingTimeInMs() - time_millis_at_init_;
971 972
  }

973 974 975 976 977 978 979 980 981 982 983
  DateCache* date_cache() {
    return date_cache_;
  }

  void set_date_cache(DateCache* date_cache) {
    if (date_cache != date_cache_) {
      delete date_cache_;
    }
    date_cache_ = date_cache;
  }

  Map* get_initial_js_array_map(ElementsKind kind);
985

986 987
  static const int kProtectorValid = 1;
  static const int kProtectorInvalid = 0;
988

989
  bool IsFastArrayConstructorPrototypeChainIntact();
990
  inline bool IsArraySpeciesLookupChainIntact();
991
  inline bool IsHasInstanceLookupChainIntact();
992
  bool IsIsConcatSpreadableLookupChainIntact();
993
  bool IsIsConcatSpreadableLookupChainIntact(JSReceiver* receiver);
994
  inline bool IsStringLengthOverflowIntact();
995
  inline bool IsArrayIteratorLookupChainIntact();
996

997 998 999
  // Avoid deopt loops if fast Array Iterators migrate to slow Array Iterators.
  inline bool IsFastArrayIterationIntact();

1000 1001 1002
  // Make sure we do check for neutered array buffers.
  inline bool IsArrayBufferNeuteringIntact();

1003 1004 1005 1006 1007
  // On intent to set an element in object, make sure that appropriate
  // notifications occur if the set is on the elements of the array or
  // object prototype. Also ensure that changes to prototype chain between
  // Array and Object fire notifications.
  void UpdateArrayProtectorOnSetElement(Handle<JSObject> object);
1008 1009 1010
  void UpdateArrayProtectorOnSetLength(Handle<JSObject> object) {
    UpdateArrayProtectorOnSetElement(object);
  }
1011 1012 1013 1014 1015 1016
  void UpdateArrayProtectorOnSetPrototype(Handle<JSObject> object) {
    UpdateArrayProtectorOnSetElement(object);
  }
  void UpdateArrayProtectorOnNormalizeElements(Handle<JSObject> object) {
    UpdateArrayProtectorOnSetElement(object);
  }
1017
  void InvalidateArraySpeciesProtector();
1018
  void InvalidateHasInstanceProtector();
1019
  void InvalidateIsConcatSpreadableProtector();
1020
  void InvalidateStringLengthOverflowProtector();
1021
  void InvalidateArrayIteratorProtector();
1022
  void InvalidateArrayBufferNeuteringProtector();
1023 1024 1025 1026

  // Returns true if array is the initial array prototype in any native context.
  bool IsAnyInitialArrayPrototype(Handle<JSArray> array);

1027 1028
  V8_EXPORT_PRIVATE CallInterfaceDescriptorData* call_descriptor_data(
      int index);
1029

1030 1031
  AccessCompilerData* access_compiler_data() { return access_compiler_data_; }

1032 1033 1034 1035
  void IterateDeferredHandles(ObjectVisitor* visitor);
  void LinkDeferredHandles(DeferredHandles* deferred_handles);
  void UnlinkDeferredHandles(DeferredHandles* deferred_handles);

1036 1037 1038 1039
#ifdef DEBUG
  bool IsDeferredHandle(Object** location);
#endif  // DEBUG

1040 1041
  bool concurrent_recompilation_enabled() {
    // Thread is only available with flag enabled.
1042
    DCHECK(optimizing_compile_dispatcher_ == NULL ||
1043
           FLAG_concurrent_recompilation);
1044
    return optimizing_compile_dispatcher_ != NULL;
1045 1046
  }

1047 1048
  OptimizingCompileDispatcher* optimizing_compile_dispatcher() {
    return optimizing_compile_dispatcher_;
1049 1050
  }

1051 1052
  int id() const { return static_cast<int>(id_); }

1053
  HStatistics* GetHStatistics();
1054
  CompilationStatistics* GetTurboStatistics();
1055
  HTracer* GetHTracer();
1056
  CodeTracer* GetCodeTracer();
1057

1058 1059
  void DumpAndResetCompilationStats();

1060 1061 1062 1063 1064
  FunctionEntryHook function_entry_hook() { return function_entry_hook_; }
  void set_function_entry_hook(FunctionEntryHook function_entry_hook) {
    function_entry_hook_ = function_entry_hook;
  }

1065 1066
  void* stress_deopt_count_address() { return &stress_deopt_count_; }

1067
  V8_EXPORT_PRIVATE base::RandomNumberGenerator* random_number_generator();
1068

1069 1070 1071 1072
  // Generates a random number that is non-zero when masked
  // with the provided mask.
  int GenerateIdentityHash(uint32_t mask);

1073
  // Given an address occupied by a live code object, return that object.
1074
  Code* FindCodeObject(Address a);
1075

1076 1077 1078 1079 1080 1081 1082 1083
  int NextOptimizationId() {
    int id = next_optimization_id_++;
    if (!Smi::IsValid(next_optimization_id_)) {
      next_optimization_id_ = 0;
    }
    return id;
  }

1084 1085 1086 1087
  void AddCallCompletedCallback(CallCompletedCallback callback);
  void RemoveCallCompletedCallback(CallCompletedCallback callback);
  void FireCallCompletedCallback();

1088 1089
  void AddBeforeCallEnteredCallback(BeforeCallEnteredCallback callback);
  void RemoveBeforeCallEnteredCallback(BeforeCallEnteredCallback callback);
1090
  inline void FireBeforeCallEnteredCallback();
1091

1092 1093 1094 1095
  void AddMicrotasksCompletedCallback(MicrotasksCompletedCallback callback);
  void RemoveMicrotasksCompletedCallback(MicrotasksCompletedCallback callback);
  void FireMicrotasksCompletedCallback();

1096 1097 1098 1099
  void SetPromiseRejectCallback(PromiseRejectCallback callback);
  void ReportPromiseReject(Handle<JSObject> promise, Handle<Object> value,
                           v8::PromiseRejectEvent event);

1100 1101 1102
  void PromiseReactionJob(Handle<PromiseReactionJobInfo> info,
                          MaybeHandle<Object>* result,
                          MaybeHandle<Object>* maybe_exception);
1103
  void PromiseResolveThenableJob(Handle<PromiseResolveThenableJobInfo> info,
1104 1105
                                 MaybeHandle<Object>* result,
                                 MaybeHandle<Object>* maybe_exception);
1106
  void EnqueueMicrotask(Handle<Object> microtask);
1107
  void RunMicrotasks();
1108
  bool IsRunningMicrotasks() const { return is_running_microtasks_; }
1109

1110 1111 1112
  Handle<Symbol> SymbolFor(Heap::RootListIndex dictionary_index,
                           Handle<String> name, bool private_symbol);

1113 1114 1115
  void SetUseCounterCallback(v8::Isolate::UseCounterCallback callback);
  void CountUsage(v8::Isolate::UseCounterFeature feature);

1116 1117 1118
  BasicBlockProfiler* GetOrCreateBasicBlockProfiler();
  BasicBlockProfiler* basic_block_profiler() { return basic_block_profiler_; }

1119
  std::string GetTurboCfgFileName();
#if TRACE_MAPS
  int GetNextUniqueSharedFunctionInfoId() { return next_unique_sfi_id_++; }
#endif

1125 1126
  Address promise_hook_or_debug_is_active_address() {
    return reinterpret_cast<Address>(&promise_hook_or_debug_is_active_);
1127
  }
1128 1129 1130

  void DebugStateUpdated();

1131 1132 1133
  void SetPromiseHook(PromiseHook hook);
  void RunPromiseHook(PromiseHookType type, Handle<JSPromise> promise,
                      Handle<Object> parent);
1134

1135 1136 1137 1138 1139 1140 1141
  // Support for dynamically disabling tail call elimination.
  Address is_tail_call_elimination_enabled_address() {
    return reinterpret_cast<Address>(&is_tail_call_elimination_enabled_);
  }
  bool is_tail_call_elimination_enabled() const {
    return is_tail_call_elimination_enabled_;
  }
1142
  void SetTailCallEliminationEnabled(bool enabled);
1143

1144 1145 1146
  void AddDetachedContext(Handle<Context> context);
  void CheckDetachedContextsAfterGC();

1147 1148
  List<Object*>* partial_snapshot_cache() { return &partial_snapshot_cache_; }

1149 1150 1151 1152 1153 1154 1155
  void set_array_buffer_allocator(v8::ArrayBuffer::Allocator* allocator) {
    array_buffer_allocator_ = allocator;
  }
  v8::ArrayBuffer::Allocator* array_buffer_allocator() const {
    return array_buffer_allocator_;
  }

binji's avatar
binji committed
1156 1157
  FutexWaitListNode* futex_wait_list_node() { return &futex_wait_list_node_; }

1158 1159 1160
  CancelableTaskManager* cancelable_task_manager() {
    return cancelable_task_manager_;
  }
1161

1162 1163 1164 1165
  AstStringConstants* ast_string_constants() const {
    return ast_string_constants_;
  }

1166 1167
  interpreter::Interpreter* interpreter() const { return interpreter_; }

1168
  AccountingAllocator* allocator() { return allocator_; }
1169

1170 1171
  CompilerDispatcher* compiler_dispatcher() const {
    return compiler_dispatcher_;
1172 1173
  }

1174 1175 1176 1177 1178 1179
  // Clear all optimized code stored in native contexts.
  void ClearOSROptimizedCode();

  // Ensure that a particular optimized code is evicted.
  void EvictOSROptimizedCode(Code* code, const char* reason);

1180 1181
  bool IsInAnyContext(Object* object, uint32_t index);

hpayer's avatar
hpayer committed
1182 1183
  void SetRAILMode(RAILMode rail_mode);

1184 1185 1186 1187
  RAILMode rail_mode() { return rail_mode_.Value(); }

  double LoadStartTimeMs();

1188 1189 1190 1191 1192 1193
  void IsolateInForegroundNotification();

  void IsolateInBackgroundNotification();

  bool IsIsolateInBackground() { return is_isolate_in_background_; }

1194
  PRINTF_FORMAT(2, 3) void PrintWithTimestamp(const char* format, ...);
1195

1196 1197 1198 1199
#ifdef USE_SIMULATOR
  base::Mutex* simulator_i_cache_mutex() { return &simulator_i_cache_mutex_; }
#endif

1200 1201 1202
  void set_allow_atomics_wait(bool set) { allow_atomics_wait_ = set; }
  bool allow_atomics_wait() { return allow_atomics_wait_; }

1203
 protected:
1204
  explicit Isolate(bool enable_serializer);
1205
  bool IsArrayOrObjectPrototype(Object* object);
 private:
1208 1209 1210
  friend struct GlobalState;
  friend struct InitializeGlobalState;

1211 1212 1213
  // These fields are accessed through the API, offsets must be kept in sync
  // with v8::internal::Internals (in include/v8.h) constants. This is also
  // verified in Isolate::Init() using runtime checks.
1214
  void* embedder_data_[Internals::kNumIsolateDataSlots];
1215 1216
  Heap heap_;

1217 1218 1219 1220 1221 1222 1223 1224 1225 1226
  // The per-process lock should be acquired before the ThreadDataTable is
  // modified.
  class ThreadDataTable {
   public:
    ThreadDataTable();
    ~ThreadDataTable();

    PerIsolateThreadData* Lookup(Isolate* isolate, ThreadId thread_id);
    void Insert(PerIsolateThreadData* data);
    void Remove(PerIsolateThreadData* data);
1227
    void RemoveAllThreads(Isolate* isolate);
1228 1229 1230 1231 1232 1233 1234 1235 1236

   private:
    PerIsolateThreadData* list_;
  };

  // These items form a stack synchronously with threads Enter'ing and Exit'ing
  // the Isolate. The top of the stack points to a thread which is currently
  // running the Isolate. When the stack is empty, the Isolate is considered
  // not entered by any thread and can be Disposed.
thakis's avatar
thakis committed
1237
  // If the same thread enters the Isolate more than once, the entry_count_
1238 1239 1240 1241 1242 1243 1244 1245 1246 1247 1248 1249 1250 1251 1252 1253
  // is incremented rather then a new item pushed to the stack.
  class EntryStackItem {
   public:
    EntryStackItem(PerIsolateThreadData* previous_thread_data,
                   Isolate* previous_isolate,
                   EntryStackItem* previous_item)
        : entry_count(1),
          previous_thread_data(previous_thread_data),
          previous_isolate(previous_isolate),
          previous_item(previous_item) { }

    int entry_count;
    PerIsolateThreadData* previous_thread_data;
    Isolate* previous_isolate;
    EntryStackItem* previous_item;

1254
   private:
1255 1256 1257
    DISALLOW_COPY_AND_ASSIGN(EntryStackItem);
  };

  static base::LazyMutex thread_data_table_mutex_;
1259

1260 1261 1262
  static base::Thread::LocalStorageKey per_isolate_thread_data_key_;
  static base::Thread::LocalStorageKey isolate_key_;
  static base::Thread::LocalStorageKey thread_id_key_;
1263 1264
  static ThreadDataTable* thread_data_table_;

1265
  // A global counter for all generated Isolates, might overflow.
1266
  static base::Atomic32 isolate_counter_;
1267

1268 1269 1270 1271
#if DEBUG
  static base::Atomic32 isolate_key_created_;
#endif

1272 1273 1274 1275 1276 1277 1278 1279 1280 1281 1282 1283 1284 1285 1286 1287 1288 1289 1290 1291 1292 1293 1294 1295 1296 1297 1298 1299 1300
  void Deinit();

  static void SetIsolateThreadLocals(Isolate* isolate,
                                     PerIsolateThreadData* data);

  // Find the PerThread for this particular (isolate, thread) combination.
  // If one does not yet exist, allocate a new one.
  PerIsolateThreadData* FindOrAllocatePerThreadDataForThisThread();

  // Initializes the current thread to run this Isolate.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time, this should be prevented using external locking.
  void Enter();

  // Exits the current thread. The previosuly entered Isolate is restored
  // for the thread.
  // Not thread-safe. Multiple threads should not Enter/Exit the same isolate
  // at the same time, this should be prevented using external locking.
  void Exit();

  void InitializeThreadLocal();

  void MarkCompactPrologue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);
  void MarkCompactEpilogue(bool is_compacting,
                           ThreadLocalTop* archived_thread_data);

  void FillCache();

1301 1302 1303 1304 1305
  // Propagate pending exception message to the v8::TryCatch.
  // If there is no external try-catch or message was successfully propagated,
  // then return true.
  bool PropagatePendingExceptionToExternalTryCatch();

1306 1307 1308 1309
  // Remove per-frame stored materialized objects when we are unwinding
  // the frame.
  void RemoveMaterializedObjectsOnUnwind(StackFrame* frame);

1310 1311
  void RunMicrotasksInternal();

hpayer's avatar
hpayer committed
1312 1313 1314 1315 1316 1317 1318 1319 1320 1321 1322 1323 1324 1325
  const char* RAILModeName(RAILMode rail_mode) const {
    switch (rail_mode) {
      case PERFORMANCE_RESPONSE:
        return "RESPONSE";
      case PERFORMANCE_ANIMATION:
        return "ANIMATION";
      case PERFORMANCE_IDLE:
        return "IDLE";
      case PERFORMANCE_LOAD:
        return "LOAD";
    }
    return "";
  }

  // TODO(alph): Remove along with the deprecated GetCpuProfiler().
  friend v8::CpuProfiler* v8::Isolate::GetCpuProfiler();
  CpuProfiler* cpu_profiler() const { return cpu_profiler_; }

1330
  base::Atomic32 id_;
1331
  EntryStackItem* entry_stack_;
1332 1333
  int stack_trace_nesting_level_;
  StringStream* incomplete_message_;
1334
  Address isolate_addresses_[kIsolateAddressCount + 1];  // NOLINT
1335 1336 1337 1338
  Bootstrapper* bootstrapper_;
  RuntimeProfiler* runtime_profiler_;
  CompilationCache* compilation_cache_;
  Counters* counters_;
1339
  base::RecursiveMutex break_access_;
1340 1341 1342
  Logger* logger_;
  StackGuard stack_guard_;
  StatsTable* stats_table_;
1343 1344
  StubCache* load_stub_cache_;
  StubCache* store_stub_cache_;
1345
  CodeAgingHelper* code_aging_helper_;
1346
  DeoptimizerData* deoptimizer_data_;
1347
  bool deoptimizer_lazy_throw_;
jarin@chromium.org's avatar
jarin@chromium.org committed
1348
  MaterializedObjectStore* materialized_object_store_;
1349 1350 1351 1352
  ThreadLocalTop thread_local_top_;
  bool capture_stack_trace_for_uncaught_exceptions_;
  int stack_trace_for_uncaught_exceptions_frame_limit_;
  StackTrace::StackTraceOptions stack_trace_for_uncaught_exceptions_options_;
1353
  ContextSlotCache* context_slot_cache_;
1354
  DescriptorLookupCache* descriptor_lookup_cache_;
1355
  HandleScopeData handle_scope_data_;
1356
  HandleScopeImplementer* handle_scope_implementer_;
1357
  UnicodeCache* unicode_cache_;
1358
  AccountingAllocator* allocator_;
1359
  InnerPointerToCodeCache* inner_pointer_to_code_cache_;
1360
  GlobalHandles* global_handles_;
1361
  EternalHandles* eternal_handles_;
1362 1363 1364 1365 1366 1367 1368 1369
  ThreadManager* thread_manager_;
  RuntimeState runtime_state_;
  Builtins builtins_;
  unibrow::Mapping<unibrow::Ecma262UnCanonicalize> jsregexp_uncanonicalize_;
  unibrow::Mapping<unibrow::CanonicalizationRange> jsregexp_canonrange_;
  unibrow::Mapping<unibrow::Ecma262Canonicalize>
      regexp_macro_assembler_canonicalize_;
  RegExpStack* regexp_stack_;
1370
  List<int> regexp_indices_;
1371
  DateCache* date_cache_;
1372
  CallInterfaceDescriptorData* call_descriptor_data_;
1373
  AccessCompilerData* access_compiler_data_;
1374
  base::RandomNumberGenerator* random_number_generator_;
1375
  base::AtomicValue<RAILMode> rail_mode_;
1376
  bool promise_hook_or_debug_is_active_;
1377
  PromiseHook promise_hook_;
1378 1379
  base::Mutex rail_mutex_;
  double load_start_time_ms_;
1380

1381 1382 1383
  // Whether the isolate has been created for snapshotting.
  bool serializer_enabled_;

1384 1385 1386
  // True if fatal error has been signaled for this isolate.
  bool has_fatal_error_;

1387 1388 1389
  // True if this isolate was initialized from a snapshot.
  bool initialized_from_snapshot_;

1390 1391 1392
  // True if ES2015 tail call elimination feature is enabled.
  bool is_tail_call_elimination_enabled_;

1393 1394 1395 1396
  // True if the isolate is in background. This flag is used
  // to prioritize between memory usage and latency.
  bool is_isolate_in_background_;

1397 1398 1399
  // Time stamp at initialization.
  double time_millis_at_init_;

1400 1401 1402 1403 1404 1405 1406
#ifdef DEBUG
  // A static array of histogram info for each type.
  HistogramInfo heap_histograms_[LAST_TYPE + 1];
  JSObject::SpillInformation js_spill_information_;
#endif

  Debug* debug_;
1407
  CpuProfiler* cpu_profiler_;
1408
  HeapProfiler* heap_profiler_;
1409
  std::unique_ptr<CodeEventDispatcher> code_event_dispatcher_;
1410
  FunctionEntryHook function_entry_hook_;
1411

1412 1413
  AstStringConstants* ast_string_constants_;

1414 1415
  interpreter::Interpreter* interpreter_;

1416
  CompilerDispatcher* compiler_dispatcher_;
1417

1418 1419 1420
  typedef std::pair<InterruptCallback, void*> InterruptEntry;
  std::queue<InterruptEntry> api_interrupts_queue_;

1421 1422 1423 1424 1425 1426 1427 1428 1429 1430 1431 1432 1433 1434 1435 1436 1437 1438 1439 1440 1441
#define GLOBAL_BACKING_STORE(type, name, initialvalue)                         \
  type name##_;
  ISOLATE_INIT_LIST(GLOBAL_BACKING_STORE)
#undef GLOBAL_BACKING_STORE

#define GLOBAL_ARRAY_BACKING_STORE(type, name, length)                         \
  type name##_[length];
  ISOLATE_INIT_ARRAY_LIST(GLOBAL_ARRAY_BACKING_STORE)
#undef GLOBAL_ARRAY_BACKING_STORE

#ifdef DEBUG
  // This class is huge and has a number of fields controlled by
  // preprocessor defines. Make sure the offsets of these fields agree
  // between compilation units.
#define ISOLATE_FIELD_OFFSET(type, name, ignored)                              \
  static const intptr_t name##_debug_offset_;
  ISOLATE_INIT_LIST(ISOLATE_FIELD_OFFSET)
  ISOLATE_INIT_ARRAY_LIST(ISOLATE_FIELD_OFFSET)
#undef ISOLATE_FIELD_OFFSET
#endif

1442
  DeferredHandles* deferred_handles_head_;
1443
  OptimizingCompileDispatcher* optimizing_compile_dispatcher_;
1444

1445 1446 1447
  // Counts deopt points if deopt_every_n_times is enabled.
  unsigned int stress_deopt_count_;

1448 1449
  int next_optimization_id_;

1450 1451 1452 1453
#if TRACE_MAPS
  int next_unique_sfi_id_;
#endif

1454 1455 1456
  // List of callbacks before a Call starts execution.
  List<BeforeCallEnteredCallback> before_call_entered_callbacks_;

1457 1458 1459
  // List of callbacks when a Call completes.
  List<CallCompletedCallback> call_completed_callbacks_;

1460 1461
  // List of callbacks after microtasks were run.
  List<MicrotasksCompletedCallback> microtasks_completed_callbacks_;
1462
  bool is_running_microtasks_;
1463

1464
  v8::Isolate::UseCounterCallback use_counter_callback_;
1465
  BasicBlockProfiler* basic_block_profiler_;
1466

1467
  List<Object*> partial_snapshot_cache_;
1468

1469 1470
  v8::ArrayBuffer::Allocator* array_buffer_allocator_;

binji's avatar
binji committed
1471 1472
  FutexWaitListNode futex_wait_list_node_;

1473
  CancelableTaskManager* cancelable_task_manager_;
1474

1475 1476 1477
  v8::Isolate::AbortOnUncaughtExceptionCallback
      abort_on_uncaught_exception_callback_;

1478 1479 1480 1481
#ifdef USE_SIMULATOR
  base::Mutex simulator_i_cache_mutex_;
#endif

1482 1483
  bool allow_atomics_wait_;

1484
  friend class ExecutionAccess;
1485
  friend class HandleScopeImplementer;
1486
  friend class HeapTester;
1487
  friend class OptimizingCompileDispatcher;
1488
  friend class SweeperThread;
1489 1490 1491
  friend class ThreadManager;
  friend class Simulator;
  friend class StackGuard;
1492
  friend class ThreadId;
1493 1494
  friend class v8::Isolate;
  friend class v8::Locker;
1495
  friend class v8::Unlocker;
1496
  friend class v8::SnapshotCreator;
1497
  friend v8::StartupData v8::V8::CreateSnapshotDataBlob(const char*);
1498 1499
  friend v8::StartupData v8::V8::WarmUpSnapshotDataBlob(v8::StartupData,
                                                        const char*);
1500 1501 1502 1503 1504

  DISALLOW_COPY_AND_ASSIGN(Isolate);
};


#undef FIELD_ACCESSOR
#undef THREAD_LOCAL_TOP_ACCESSOR

class PromiseOnStack {
 public:
1511 1512
  PromiseOnStack(Handle<JSObject> promise, PromiseOnStack* prev)
      : promise_(promise), prev_(prev) {}
1513 1514 1515 1516 1517 1518 1519 1520 1521
  Handle<JSObject> promise() { return promise_; }
  PromiseOnStack* prev() { return prev_; }

 private:
  Handle<JSObject> promise_;
  PromiseOnStack* prev_;
};


// If the GCC version is 4.1.x or 4.2.x an additional field is added to the
// class as a work around for a bug in the generated code found with these
// versions of GCC. See V8 issue 122 for details.
class SaveContext BASE_EMBEDDED {
 public:
1527 1528
  explicit SaveContext(Isolate* isolate);
  ~SaveContext();
1529 1530 1531 1532 1533

  Handle<Context> context() { return context_; }
  SaveContext* prev() { return prev_; }

  // Returns true if this save context is below a given JavaScript frame.
1534
  bool IsBelowFrame(StandardFrame* frame) {
1535
    return (c_entry_fp_ == 0) || (c_entry_fp_ > frame->sp());
1536 1537 1538
  }

 private:
1539
  Isolate* const isolate_;
1540
  Handle<Context> context_;
1541
  SaveContext* const prev_;
1542
  Address c_entry_fp_;
1543 1544 1545 1546 1547 1548
};


// Debug-only scope that asserts the isolate's context is the same when the
// scope is destroyed as it was at construction. Compiles to nothing in
// release builds.
class AssertNoContextChange BASE_EMBEDDED {
#ifdef DEBUG
 public:
  explicit AssertNoContextChange(Isolate* isolate);
  ~AssertNoContextChange() {
    DCHECK(isolate_->context() == *context_);
  }

 private:
  Isolate* isolate_;
  Handle<Context> context_;
#else
 public:
  explicit AssertNoContextChange(Isolate* isolate) { }
#endif
};


class ExecutionAccess BASE_EMBEDDED {
 public:
  explicit ExecutionAccess(Isolate* isolate) : isolate_(isolate) {
    Lock(isolate);
  }
  ~ExecutionAccess() { Unlock(isolate_); }

1571 1572
  static void Lock(Isolate* isolate) { isolate->break_access()->Lock(); }
  static void Unlock(Isolate* isolate) { isolate->break_access()->Unlock(); }
1573 1574

  static bool TryLock(Isolate* isolate) {
1575
    return isolate->break_access()->TryLock();
1576 1577 1578 1579 1580 1581 1582
  }

 private:
  Isolate* isolate_;
};


// Support for checking for stack-overflows.
1584 1585 1586 1587
class StackLimitCheck BASE_EMBEDDED {
 public:
  explicit StackLimitCheck(Isolate* isolate) : isolate_(isolate) { }

1588
  // Use this to check for stack-overflows in C++ code.
1589
  bool HasOverflowed() const {
1590
    StackGuard* stack_guard = isolate_->stack_guard();
1591
    return GetCurrentStackPosition() < stack_guard->real_climit();
1592
  }
1593

1594 1595 1596 1597 1598 1599
  // Use this to check for interrupt request in C++ code.
  bool InterruptRequested() {
    StackGuard* stack_guard = isolate_->stack_guard();
    return GetCurrentStackPosition() < stack_guard->climit();
  }

1600
  // Use this to check for stack-overflow when entering runtime from JS code.
1601
  bool JsHasOverflowed(uintptr_t gap = 0) const;
1602

1603 1604 1605 1606
 private:
  Isolate* isolate_;
};

// Bails out of the enclosing function with |result_value| after scheduling a
// stack-overflow exception when the C++ stack limit has been exceeded.
#define STACK_CHECK(isolate, result_value) \
  do {                                     \
    StackLimitCheck stack_check(isolate);  \
    if (stack_check.HasOverflowed()) {     \
      isolate->StackOverflow();            \
      return result_value;                 \
    }                                      \
  } while (false)

// Support for temporarily postponing interrupts. When the outermost
// postpone scope is left the interrupts will be re-enabled and any
// interrupts that occurred while in the scope will be taken into
// account.
class PostponeInterruptsScope BASE_EMBEDDED {
 public:
1622 1623 1624 1625 1626 1627
  PostponeInterruptsScope(Isolate* isolate,
                          int intercept_mask = StackGuard::ALL_INTERRUPTS)
      : stack_guard_(isolate->stack_guard()),
        intercept_mask_(intercept_mask),
        intercepted_flags_(0) {
    stack_guard_->PushPostponeInterruptsScope(this);
1628 1629 1630
  }

  ~PostponeInterruptsScope() {
1631
    stack_guard_->PopPostponeInterruptsScope();
1632
  }
1633 1634 1635 1636 1637

  // Find the bottom-most scope that intercepts this interrupt.
  // Return whether the interrupt has been intercepted.
  bool Intercept(StackGuard::InterruptFlag flag);

1638 1639
 private:
  StackGuard* stack_guard_;
1640 1641 1642 1643 1644
  int intercept_mask_;
  int intercepted_flags_;
  PostponeInterruptsScope* prev_;

  friend class StackGuard;
1645 1646 1647
};


class CodeTracer final : public Malloced {
1649 1650 1651 1652 1653 1654 1655 1656 1657 1658
 public:
  explicit CodeTracer(int isolate_id)
      : file_(NULL),
        scope_depth_(0) {
    if (!ShouldRedirect()) {
      file_ = stdout;
      return;
    }

    if (FLAG_redirect_code_traces_to == NULL) {
1659 1660
      SNPrintF(filename_,
               "code-%d-%d.asm",
1661
               base::OS::GetCurrentProcessId(),
1662
               isolate_id);
1663
    } else {
1664
      StrNCpy(filename_, FLAG_redirect_code_traces_to, filename_.length());
1665 1666 1667 1668 1669 1670 1671 1672 1673 1674 1675 1676 1677 1678 1679 1680 1681 1682 1683 1684 1685 1686
    }

    WriteChars(filename_.start(), "", 0, false);
  }

  class Scope {
   public:
    explicit Scope(CodeTracer* tracer) : tracer_(tracer) { tracer->OpenFile(); }
    ~Scope() { tracer_->CloseFile();  }

    FILE* file() const { return tracer_->file(); }

   private:
    CodeTracer* tracer_;
  };

  void OpenFile() {
    if (!ShouldRedirect()) {
      return;
    }

    if (file_ == NULL) {
1687
      file_ = base::OS::FOpen(filename_.start(), "ab");
1688 1689 1690 1691 1692 1693 1694 1695 1696 1697 1698 1699 1700 1701 1702 1703 1704 1705 1706 1707 1708 1709 1710 1711 1712 1713 1714
    }

    scope_depth_++;
  }

  void CloseFile() {
    if (!ShouldRedirect()) {
      return;
    }

    if (--scope_depth_ == 0) {
      fclose(file_);
      file_ = NULL;
    }
  }

  FILE* file() const { return file_; }

 private:
  static bool ShouldRedirect() {
    return FLAG_redirect_code_traces;
  }

  EmbeddedVector<char, 128> filename_;
  FILE* file_;
  int scope_depth_;
};

}  // namespace internal
}  // namespace v8

#endif  // V8_ISOLATE_H_