// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_CODE_H_
#define V8_OBJECTS_CODE_H_

#include "src/codegen/handler-table.h"
#include "src/contexts.h"
#include "src/objects.h"
#include "src/objects/fixed-array.h"
#include "src/objects/heap-object.h"
#include "src/objects/struct.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

class ByteArray;
class BytecodeArray;
class CodeDataContainer;
class CodeDesc;

namespace interpreter {
class Register;
}

// Code describes objects with on-the-fly generated machine code.
class Code : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // Opaque data type for encapsulating code flags like kind, inline
  // cache state, and arguments count.
  using Flags = uint32_t;

#define CODE_KIND_LIST(V)   \
  V(OPTIMIZED_FUNCTION)     \
  V(BYTECODE_HANDLER)       \
  V(STUB)                   \
  V(BUILTIN)                \
  V(REGEXP)                 \
  V(WASM_FUNCTION)          \
  V(WASM_TO_CAPI_FUNCTION)  \
  V(WASM_TO_JS_FUNCTION)    \
  V(JS_TO_WASM_FUNCTION)    \
  V(WASM_INTERPRETER_ENTRY) \
  V(C_WASM_ENTRY)

  enum Kind {
#define DEFINE_CODE_KIND_ENUM(name) name,
    CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
#undef DEFINE_CODE_KIND_ENUM
        NUMBER_OF_KINDS
  };

  static const char* Kind2String(Kind kind);

#ifdef ENABLE_DISASSEMBLER
  const char* GetName(Isolate* isolate) const;
  V8_EXPORT_PRIVATE void Disassemble(const char* name, std::ostream& os,
                                     Address current_pc = kNullAddress);
#endif

  // [instruction_size]: Size of the native instructions, including embedded
  // data such as the safepoints table.
  inline int raw_instruction_size() const;
  inline void set_raw_instruction_size(int value);

  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
  // at instruction_start.
  inline int InstructionSize() const;
  V8_EXPORT_PRIVATE int OffHeapInstructionSize() const;

  // [relocation_info]: Code relocation information
  DECL_ACCESSORS(relocation_info, ByteArray)

  // This function should be called only from GC.
  void ClearEmbeddedObjects(Heap* heap);

  // [deoptimization_data]: Array containing data for deopt.
  DECL_ACCESSORS(deoptimization_data, FixedArray)

  // [source_position_table]: ByteArray for the source positions table or
  // SourcePositionTableWithFrameCache.
  DECL_ACCESSORS(source_position_table, Object)
  inline ByteArray SourcePositionTable() const;
  inline ByteArray SourcePositionTableIfCollected() const;

  // [code_data_container]: A container indirection for all mutable fields.
  DECL_ACCESSORS(code_data_container, CodeDataContainer)

  // [next_code_link]: Link for lists of optimized or deoptimized code.
  // Note that this field is stored in the {CodeDataContainer} to be mutable.
  inline Object next_code_link() const;
  inline void set_next_code_link(Object value);

  // Unchecked accessors to be used during GC.
  inline ByteArray unchecked_relocation_info() const;

  inline int relocation_size() const;

  // [kind]: Access to specific code kind.
  inline Kind kind() const;

  inline bool is_optimized_code() const;
  inline bool is_wasm_code() const;

  // Testers for interpreter builtins.
  inline bool is_interpreter_trampoline_builtin() const;

  // Tells whether the code checks the optimization marker in the function's
  // feedback vector.
  inline bool checks_optimization_marker() const;

  // Tells whether the outgoing parameters of this code are tagged pointers.
  inline bool has_tagged_params() const;

  // [is_turbofanned]: For kind STUB or OPTIMIZED_FUNCTION, tells whether the
  // code object was generated by the TurboFan optimizing compiler.
  inline bool is_turbofanned() const;

  // [can_have_weak_objects]: For kind OPTIMIZED_FUNCTION, tells whether the
  // embedded objects in code should be treated weakly.
  inline bool can_have_weak_objects() const;
  inline void set_can_have_weak_objects(bool value);

  // [builtin_index]: For builtins, tells which builtin index the code object
  // has. The builtin index is a non-negative integer for builtins, and -1
  // otherwise.
  inline int builtin_index() const;
  inline void set_builtin_index(int id);
  inline bool is_builtin() const;

  inline bool has_safepoint_info() const;

  // [stack_slots]: If {has_safepoint_info()}, the number of stack slots
  // reserved in the code prologue.
  inline int stack_slots() const;

  // [safepoint_table_offset]: If {has_safepoint_info()}, the offset in the
  // instruction stream where the safepoint table starts.
  inline int safepoint_table_offset() const;
  inline void set_safepoint_table_offset(int offset);
  int safepoint_table_size() const;
  bool has_safepoint_table() const;

  // [handler_table_offset]: The offset in the instruction stream where the
  // exception handler table starts.
  inline int handler_table_offset() const;
  inline void set_handler_table_offset(int offset);
  int handler_table_size() const;
  bool has_handler_table() const;

  // [constant_pool_offset]: Offset of the constant pool.
  // Valid for FLAG_enable_embedded_constant_pool only.
  inline int constant_pool_offset() const;
  inline void set_constant_pool_offset(int offset);
  int constant_pool_size() const;
  bool has_constant_pool() const;

  // [code_comments_offset]: Offset of the code comment section.
  inline int code_comments_offset() const;
  inline void set_code_comments_offset(int offset);
  inline Address code_comments() const;
  V8_EXPORT_PRIVATE int code_comments_size() const;
  V8_EXPORT_PRIVATE bool has_code_comments() const;

  // The size of the executable instruction area, without embedded metadata.
  int ExecutableInstructionSize() const;

  // [marked_for_deoptimization]: For kind OPTIMIZED_FUNCTION tells whether
  // the code is going to be deoptimized.
  inline bool marked_for_deoptimization() const;
  inline void set_marked_for_deoptimization(bool flag);

  // [embedded_objects_cleared]: For kind OPTIMIZED_FUNCTION tells whether
  // the embedded objects in the code marked for deoptimization were cleared.
  // Note that embedded_objects_cleared() implies marked_for_deoptimization().
  inline bool embedded_objects_cleared() const;
  inline void set_embedded_objects_cleared(bool flag);

  // [deopt_already_counted]: For kind OPTIMIZED_FUNCTION tells whether
  // the code was already deoptimized.
  inline bool deopt_already_counted() const;
  inline void set_deopt_already_counted(bool flag);

  // [is_promise_rejection]: For kind BUILTIN tells whether the
  // exception thrown by the code will lead to promise rejection or
  // go uncaught if both this and is_exception_caught are set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_promise_rejection(bool flag);

  // [is_exception_caught]: For kind BUILTIN tells whether the
  // exception thrown by the code will be caught internally or
  // go uncaught if both this and is_promise_rejection are set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_exception_caught(bool flag);

  // [is_off_heap_trampoline]: For kind BUILTIN tells whether
  // this is a trampoline to an off-heap builtin.
  inline bool is_off_heap_trampoline() const;

  // [constant_pool]: The constant pool for this function.
  inline Address constant_pool() const;

  // Get the safepoint entry for the given pc.
  SafepointEntry GetSafepointEntry(Address pc);

  // The entire code object including its header is copied verbatim to the
  // snapshot so that it can be written in one, fast, memcpy during
  // deserialization. The deserializer will overwrite some pointers, rather
  // like a runtime linker, but the random allocation addresses used in the
  // mksnapshot process would still be present in the unlinked snapshot data,
  // which would make snapshot production non-reproducible. This method wipes
  // out the to-be-overwritten header data for reproducible snapshots.
  inline void WipeOutHeader();

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic. Depending on the V8 build mode there could be no padding.
  inline void clear_padding();
  // Initialize the flags field. Similar to clear_padding above, this ensures
  // that the snapshot content is deterministic.
  inline void initialize_flags(Kind kind, bool has_unwinding_info,
                               bool is_turbofanned, int stack_slots,
                               bool is_off_heap_trampoline);

  // Convert a target address into a code object.
  static inline Code GetCodeFromTargetAddress(Address address);

  // Convert an entry address into an object.
  static inline Code GetObjectFromEntryAddress(Address location_of_address);

  // Returns the address of the first instruction.
  inline Address raw_instruction_start() const;

  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
  inline Address InstructionStart() const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionStart() const;

  // Returns the address right after the last instruction.
  inline Address raw_instruction_end() const;

  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
  inline Address InstructionEnd() const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionEnd() const;

  // Returns the size of the instructions, padding, relocation and unwinding
  // information.
  inline int body_size() const;

  // Returns the size of code and its metadata. This includes the size of code
  // relocation information, deoptimization data and handler table.
  inline int SizeIncludingMetadata() const;

  // Returns the address of the first relocation info (read backwards!).
  inline byte* relocation_start() const;

  // Returns the address right after the relocation info (read backwards!).
  inline byte* relocation_end() const;

  // [has_unwinding_info]: Whether this code object has unwinding information.
  // If it doesn't, unwinding_information_start() will point to invalid data.
  //
  // The body of all code objects has the following layout.
  //
  //  +--------------------------+  <-- raw_instruction_start()
  //  |       instructions       |
  //  |           ...            |
  //  +--------------------------+
  //  |     embedded metadata    |  <-- safepoint_table_offset()
  //  |           ...            |  <-- handler_table_offset()
  //  |                          |  <-- constant_pool_offset()
  //  |                          |  <-- code_comments_offset()
  //  |                          |
  //  +--------------------------+  <-- raw_instruction_end()
  //
  // If has_unwinding_info() is false, raw_instruction_end() points to the first
  // memory location after the end of the code object. Otherwise, the body
  // continues as follows:
  //
  //  +--------------------------+
  //  |    padding to the next   |
  //  |  8-byte aligned address  |
  //  +--------------------------+  <-- raw_instruction_end()
  //  |   [unwinding_info_size]  |
  //  |        as uint64_t       |
  //  +--------------------------+  <-- unwinding_info_start()
  //  |       unwinding info     |
  //  |            ...           |
  //  +--------------------------+  <-- unwinding_info_end()
  //
  // and unwinding_info_end() points to the first memory location after the end
  // of the code object.
  //
  inline bool has_unwinding_info() const;

  // [unwinding_info_size]: Size of the unwinding information.
  inline int unwinding_info_size() const;
  inline void set_unwinding_info_size(int value);

  // Returns the address of the unwinding information, if any.
  inline Address unwinding_info_start() const;

  // Returns the address right after the end of the unwinding information.
  inline Address unwinding_info_end() const;
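  // Illustrative sketch (not part of this header): per the layout diagram
  // above, when has_unwinding_info() is true the accessors are assumed to
  // relate as follows. The variable name below is purely illustrative.
  //
  //   DCHECK(code.has_unwinding_info());
  //   // The size field is a uint64_t stored at raw_instruction_end().
  //   DCHECK_EQ(code.unwinding_info_start(),
  //             code.raw_instruction_end() + sizeof(uint64_t));
  //   DCHECK_EQ(code.unwinding_info_end(),
  //             code.unwinding_info_start() + code.unwinding_info_size());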
315 316

  // Code entry point.
317
  inline Address entry() const;
318 319

  // Returns true if pc is inside this object's instructions.
320
  inline bool contains(Address pc);
321 322 323 324 325

  // Relocate the code by delta bytes. Called to signal that this code
  // object has been moved by delta bytes.
  void Relocate(intptr_t delta);

  // Migrate code from desc without flushing the instruction cache.
  void CopyFromNoFlush(Heap* heap, const CodeDesc& desc);

  // Copy the RelocInfo portion of |desc| to |dest|. The ByteArray must be
  // exactly the same size as the RelocInfo in |desc|.
  static inline void CopyRelocInfoToByteArray(ByteArray dest,
                                              const CodeDesc& desc);

  // Flushes the instruction cache for the executable instructions of this code
  // object. Make sure to call this while the code is still writable.
  void FlushICache() const;

  // Returns the object size for a given body (used for allocation).
  static int SizeFor(int body_size) {
    DCHECK_SIZE_TAG_ALIGNED(body_size);
    return RoundUp(kHeaderSize + body_size, kCodeAlignment);
  }
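  // Worked example for SizeFor (the concrete numbers are assumptions for
  // illustration only; kHeaderSize and kCodeAlignment are build-dependent):
  // if kHeaderSize + body_size == 121 and kCodeAlignment == 32, the result is
  // rounded up to the next multiple of kCodeAlignment, i.e. 128.
  //
  //   int object_size = Code::SizeFor(body_size);
  //   DCHECK_EQ(0, object_size % kCodeAlignment);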

  // Calculate the size of the code object to report for log events. This takes
  // the layout of the code object into account.
  inline int ExecutableSize() const;

  DECL_CAST(Code)

  // Dispatched behavior.
  inline int CodeSize() const;

  DECL_PRINTER(Code)
  DECL_VERIFIER(Code)

  void PrintDeoptLocation(FILE* out, const char* str, Address pc);
  bool CanDeoptAt(Address pc);

  void SetMarkedForDeoptimization(const char* reason);

  inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();

  bool IsIsolateIndependent(Isolate* isolate);

  inline bool CanContainWeakObjects();

  inline bool IsWeakObject(HeapObject object);

  static inline bool IsWeakObjectInOptimizedCode(HeapObject object);

  // Return true if the function is inlined in the code.
  bool Inlines(SharedFunctionInfo sfi);

  class OptimizedCodeIterator;

  // Layout description.
#define CODE_FIELDS(V)                                                    \
  V(kRelocationInfoOffset, kTaggedSize)                                   \
  V(kDeoptimizationDataOffset, kTaggedSize)                               \
  V(kSourcePositionTableOffset, kTaggedSize)                              \
  V(kCodeDataContainerOffset, kTaggedSize)                                \
  /* Data or code not directly visited by GC starts here. */              \
  /* The serializer needs to copy bytes starting from here verbatim. */   \
  /* Objects embedded into code are visited via reloc info. */            \
  V(kDataStart, 0)                                                        \
  V(kInstructionSizeOffset, kIntSize)                                     \
  V(kFlagsOffset, kIntSize)                                               \
  V(kSafepointTableOffsetOffset, kIntSize)                                \
  V(kHandlerTableOffsetOffset, kIntSize)                                  \
  V(kConstantPoolOffsetOffset,                                            \
    FLAG_enable_embedded_constant_pool ? kIntSize : 0)                    \
  V(kCodeCommentsOffsetOffset, kIntSize)                                  \
  V(kBuiltinIndexOffset, kIntSize)                                        \
  V(kUnalignedHeaderSize, 0)                                              \
  /* Add padding to align the instruction start following right after */  \
  /* the Code object header. */                                           \
  V(kOptionalPaddingOffset, CODE_POINTER_PADDING(kOptionalPaddingOffset)) \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
#undef CODE_FIELDS
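  // Note on the offsets above (an assumption about the usual
  // DEFINE_FIELD_OFFSET_CONSTANTS expansion, not a normative statement): each
  // constant is the previous constant plus the previous entry's size, e.g.
  //
  //   kDeoptimizationDataOffset == kRelocationInfoOffset + kTaggedSize
  //   kFlagsOffset              == kInstructionSizeOffset + kIntSize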

  // This documents the amount of free space we have in each Code object header
  // due to padding for code alignment.
#if V8_TARGET_ARCH_ARM64
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 20 : 0;
#elif V8_TARGET_ARCH_MIPS64
  static constexpr int kHeaderPaddingSize = 0;
#elif V8_TARGET_ARCH_X64
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 20 : 0;
#elif V8_TARGET_ARCH_ARM
  static constexpr int kHeaderPaddingSize = 20;
#elif V8_TARGET_ARCH_IA32
  static constexpr int kHeaderPaddingSize = 20;
#elif V8_TARGET_ARCH_MIPS
  static constexpr int kHeaderPaddingSize = 20;
#elif V8_TARGET_ARCH_PPC64
  static constexpr int kHeaderPaddingSize =
      FLAG_enable_embedded_constant_pool ? 28 : 0;
#elif V8_TARGET_ARCH_S390X
  static constexpr int kHeaderPaddingSize = 0;
#else
#error Unknown architecture.
#endif
  STATIC_ASSERT(FIELD_SIZE(kOptionalPaddingOffset) == kHeaderPaddingSize);

  inline int GetUnwindingInfoSizeOffset() const;

  class BodyDescriptor;

  // Flags layout.  BitField<type, shift, size>.
#define CODE_FLAGS_BIT_FIELDS(V, _)    \
  V(HasUnwindingInfoField, bool, 1, _) \
  V(KindField, Kind, 5, _)             \
  V(IsTurbofannedField, bool, 1, _)    \
  V(StackSlotsField, int, 24, _)       \
  V(IsOffHeapTrampoline, bool, 1, _)
  DEFINE_BIT_FIELDS(CODE_FLAGS_BIT_FIELDS)
#undef CODE_FLAGS_BIT_FIELDS
  static_assert(NUMBER_OF_KINDS <= KindField::kMax, "Code::KindField size");
  static_assert(IsOffHeapTrampoline::kNext <= 32,
                "Code::flags field exhausted");
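  // Illustrative sketch (not taken from this header): packing and unpacking
  // the flags word with the bit fields generated above; the concrete values
  // are assumptions for illustration only.
  //
  //   Flags flags = HasUnwindingInfoField::encode(false) |
  //                 KindField::encode(BUILTIN) |
  //                 IsTurbofannedField::encode(false) |
  //                 StackSlotsField::encode(0) |
  //                 IsOffHeapTrampoline::encode(true);
  //   Kind kind = KindField::decode(flags);  // == BUILTIN
  //   DCHECK(IsOffHeapTrampoline::decode(flags));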

  // KindSpecificFlags layout (STUB, BUILTIN and OPTIMIZED_FUNCTION)
#define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
  V(MarkedForDeoptimizationField, bool, 1, _)     \
  V(EmbeddedObjectsClearedField, bool, 1, _)      \
  V(DeoptAlreadyCountedField, bool, 1, _)         \
  V(CanHaveWeakObjectsField, bool, 1, _)          \
  V(IsPromiseRejectionField, bool, 1, _)          \
  V(IsExceptionCaughtField, bool, 1, _)
  DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)
#undef CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS
  static_assert(IsExceptionCaughtField::kNext <= 32, "KindSpecificFlags full");

  // The {marked_for_deoptimization} field is accessed from generated code.
  static const int kMarkedForDeoptimizationBit =
      MarkedForDeoptimizationField::kShift;

  static const int kArgumentsBits = 16;
  // Reserve one argument count value as the "don't adapt arguments" sentinel.
  static const int kMaxArguments = (1 << kArgumentsBits) - 2;
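  // With kArgumentsBits == 16 the largest encodable count is 65535; reserving
  // one value for the "don't adapt arguments" sentinel leaves
  // kMaxArguments == 65534.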

 private:
  friend class RelocIterator;

  bool is_promise_rejection() const;
  bool is_exception_caught() const;

  OBJECT_CONSTRUCTORS(Code, HeapObject);
};

class Code::OptimizedCodeIterator {
 public:
  explicit OptimizedCodeIterator(Isolate* isolate);
  Code Next();

 private:
  Context next_context_;
  Code current_code_;
  Isolate* isolate_;

  DISALLOW_HEAP_ALLOCATION(no_gc)
  DISALLOW_COPY_AND_ASSIGN(OptimizedCodeIterator);
};

// CodeDataContainer is a container for all mutable fields associated with its
// referencing {Code} object. Since {Code} objects reside on write-protected
// pages within the heap, their header fields need to be immutable. There is
// always a 1-to-1 relation between {Code} and {CodeDataContainer}; the
// referencing field {Code::code_data_container} itself is immutable.
class CodeDataContainer : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  DECL_ACCESSORS(next_code_link, Object)
  DECL_INT_ACCESSORS(kind_specific_flags)

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

  DECL_CAST(CodeDataContainer)

  // Dispatched behavior.
  DECL_PRINTER(CodeDataContainer)
  DECL_VERIFIER(CodeDataContainer)

// Layout description.
#define CODE_DATA_FIELDS(V)                                 \
  /* Weak pointer fields. */                                \
  V(kPointerFieldsStrongEndOffset, 0)                       \
  V(kNextCodeLinkOffset, kTaggedSize)                       \
  V(kPointerFieldsWeakEndOffset, 0)                         \
  /* Raw data fields. */                                    \
  V(kKindSpecificFlagsOffset, kIntSize)                     \
  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize)) \
  /* Total size. */                                         \
  V(kSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
#undef CODE_DATA_FIELDS

  class BodyDescriptor;

  OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
};

class AbstractCode : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  // All code kinds and INTERPRETED_FUNCTION.
  enum Kind {
#define DEFINE_CODE_KIND_ENUM(name) name,
    CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
#undef DEFINE_CODE_KIND_ENUM
        INTERPRETED_FUNCTION,
    NUMBER_OF_KINDS
  };

  static const char* Kind2String(Kind kind);

  int SourcePosition(int offset);
  int SourceStatementPosition(int offset);

  // Returns the address of the first instruction.
  inline Address raw_instruction_start();

  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
  inline Address InstructionStart();

  // Returns the address right after the last instruction.
  inline Address raw_instruction_end();

  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
  inline Address InstructionEnd();

  // Returns the size of the code instructions.
  inline int raw_instruction_size();

  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
  // at instruction_start.
  inline int InstructionSize();

  // Return the source position table.
  inline ByteArray source_position_table();

  inline Object stack_frame_cache();
  static void SetStackFrameCache(Handle<AbstractCode> abstract_code,
                                 Handle<SimpleNumberDictionary> cache);
  void DropStackFrameCache();

  // Returns the size of instructions and the metadata.
  inline int SizeIncludingMetadata();

  // Returns true if pc is inside this object's instructions.
  inline bool contains(Address pc);

  // Returns the AbstractCode::Kind of the code.
  inline Kind kind();

  // Calculate the size of the code object to report for log events. This takes
  // the layout of the code object into account.
  inline int ExecutableSize();

  DECL_CAST(AbstractCode)
  inline Code GetCode();
  inline BytecodeArray GetBytecodeArray();

  // Max loop nesting marker used to postpone OSR. We don't take loop
  // nesting that is deeper than 5 levels into account.
  static const int kMaxLoopNestingMarker = 6;

  OBJECT_CONSTRUCTORS(AbstractCode, HeapObject);
};

// Dependent code is a singly linked list of weak fixed arrays. Each array
// contains weak pointers to code objects for one dependent group. The suffix of
// the array can be filled with the undefined value if the number of codes is
// less than the length of the array.
//
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
// | next | count & group 1 | code 1 | code 2 | ... | code n | undefined | ... |
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
//    |
//    V
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
// | next | count & group 2 | code 1 | code 2 | ... | code m | undefined | ... |
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
//    |
//    V
// empty_weak_fixed_array()
//
// The list of weak fixed arrays is ordered by dependency groups.

class DependentCode : public WeakFixedArray {
 public:
  DECL_CAST(DependentCode)

  enum DependencyGroup {
    // Group of code that embed a transition to this map, and depend on being
    // deoptimized when the transition is replaced by a new version.
    kTransitionGroup,
    // Group of code that omit run-time prototype checks for prototypes
    // described by this map. The group is deoptimized whenever an object
    // described by this map changes shape (and transitions to a new map),
    // possibly invalidating the assumptions embedded in the code.
    kPrototypeCheckGroup,
    // Group of code that depends on global property values in property cells
    // not being changed.
    kPropertyCellChangedGroup,
    // Group of code that omit run-time checks for field(s) introduced by
    // this map, i.e. for the field type.
    kFieldOwnerGroup,
    // Group of code that omit run-time type checks for initial maps of
    // constructors.
    kInitialMapChangedGroup,
    // Group of code that depends on tenuring information in AllocationSites
    // not being changed.
    kAllocationSiteTenuringChangedGroup,
    // Group of code that depends on element transition information in
    // AllocationSites not being changed.
    kAllocationSiteTransitionChangedGroup
  };

  // Register a code dependency of {code} on {object}.
  V8_EXPORT_PRIVATE static void InstallDependency(Isolate* isolate,
                                                  const MaybeObjectHandle& code,
                                                  Handle<HeapObject> object,
                                                  DependencyGroup group);

  void DeoptimizeDependentCodeGroup(Isolate* isolate, DependencyGroup group);

  bool MarkCodeForDeoptimization(Isolate* isolate, DependencyGroup group);

  // The following low-level accessors are exposed only for tests.
  inline DependencyGroup group();
  inline MaybeObject object_at(int i);
  inline int count();
  inline DependentCode next_link();

 private:
  static const char* DependencyGroupName(DependencyGroup group);

  // Get/Set {object}'s {DependentCode}.
  static DependentCode GetDependentCode(Handle<HeapObject> object);
  static void SetDependentCode(Handle<HeapObject> object,
                               Handle<DependentCode> dep);

  static Handle<DependentCode> New(Isolate* isolate, DependencyGroup group,
                                   const MaybeObjectHandle& object,
                                   Handle<DependentCode> next);
  static Handle<DependentCode> EnsureSpace(Isolate* isolate,
                                           Handle<DependentCode> entries);
  static Handle<DependentCode> InsertWeakCode(Isolate* isolate,
                                              Handle<DependentCode> entries,
                                              DependencyGroup group,
                                              const MaybeObjectHandle& code);

  // Compact by removing cleared weak cells and return true if there was
  // any cleared weak cell.
  bool Compact();

  static int Grow(int number_of_entries) {
    if (number_of_entries < 5) return number_of_entries + 1;
    return number_of_entries * 5 / 4;
  }
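  // Worked example of the growth policy above: Grow(4) == 5 (grow by one
  // while the array is small) and Grow(8) == 10 (roughly 1.25x afterwards).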

  static const int kGroupCount = kAllocationSiteTransitionChangedGroup + 1;
  static const int kNextLinkIndex = 0;
  static const int kFlagsIndex = 1;
  static const int kCodesStartIndex = 2;

  inline void set_next_link(DependentCode next);
  inline void set_count(int value);
  inline void set_object_at(int i, MaybeObject object);
  inline void clear_at(int i);
  inline void copy(int from, int to);

  inline int flags();
  inline void set_flags(int flags);
  class GroupField : public BitField<int, 0, 3> {};
  class CountField : public BitField<int, 3, 27> {};
  STATIC_ASSERT(kGroupCount <= GroupField::kMax + 1);
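  // Illustrative sketch (not taken from this header): the flags element packs
  // the dependency group together with the entry count, matching the
  // "count & group" cell in the diagram above; the values are illustrative.
  //
  //   int f = GroupField::encode(kTransitionGroup) | CountField::encode(3);
  //   DCHECK_EQ(kTransitionGroup, GroupField::decode(f));
  //   DCHECK_EQ(3, CountField::decode(f));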
711

712
  OBJECT_CONSTRUCTORS(DependentCode, WeakFixedArray);
713 714 715
};

// BytecodeArray represents a sequence of interpreter bytecodes.
class BytecodeArray : public FixedArrayBase {
 public:
  enum Age {
    kNoAgeBytecodeAge = 0,
    kQuadragenarianBytecodeAge,
    kQuinquagenarianBytecodeAge,
    kSexagenarianBytecodeAge,
    kSeptuagenarianBytecodeAge,
    kOctogenarianBytecodeAge,
    kAfterLastBytecodeAge,
    kFirstBytecodeAge = kNoAgeBytecodeAge,
    kLastBytecodeAge = kAfterLastBytecodeAge - 1,
    kBytecodeAgeCount = kAfterLastBytecodeAge - kFirstBytecodeAge - 1,
    kIsOldBytecodeAge = kSexagenarianBytecodeAge
  };
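  // For reference, with the enumerators above kLastBytecodeAge is
  // kOctogenarianBytecodeAge (5), kBytecodeAgeCount evaluates to 5, and
  // kIsOldBytecodeAge is kSexagenarianBytecodeAge (3).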

  static constexpr int SizeFor(int length) {
    return OBJECT_POINTER_ALIGN(kHeaderSize + length);
  }

  // Setter and getter
  inline byte get(int index) const;
  inline void set(int index, byte value);

  // Returns data start address.
  inline Address GetFirstBytecodeAddress();

  // Accessors for frame size.
  inline int frame_size() const;
  inline void set_frame_size(int frame_size);

  // Accessor for register count (derived from frame_size).
  inline int register_count() const;

  // Accessors for parameter count (including implicit 'this' receiver).
  inline int parameter_count() const;
  inline void set_parameter_count(int number_of_parameters);

  // Register used to pass the incoming new.target or generator object from the
  // function call.
  inline interpreter::Register incoming_new_target_or_generator_register()
      const;
  inline void set_incoming_new_target_or_generator_register(
      interpreter::Register incoming_new_target_or_generator_register);

  // Accessors for OSR loop nesting level.
  inline int osr_loop_nesting_level() const;
  inline void set_osr_loop_nesting_level(int depth);

  // Accessors for bytecode's code age.
  inline Age bytecode_age() const;
  inline void set_bytecode_age(Age age);

  // Accessors for the constant pool.
  DECL_ACCESSORS(constant_pool, FixedArray)

  // Accessors for handler table containing offsets of exception handlers.
  DECL_ACCESSORS(handler_table, ByteArray)

  // Accessors for source position table. Can contain:
  // * undefined (initial value)
  // * empty_byte_array (for bytecode generated for functions that will never
  // have source positions, e.g. native functions).
  // * ByteArray (when source positions have been collected for the bytecode)
  // * SourcePositionTableWithFrameCache (as above but with a frame cache)
  // * exception (when an error occurred while explicitly collecting source
  // positions for pre-existing bytecode).
  DECL_ACCESSORS(source_position_table, Object)

  // This must only be called if source position collection has already been
  // attempted. (If it failed because of an exception then it will return
  // empty_byte_array).
  inline ByteArray SourcePositionTable() const;
  // If source positions have not been collected or an exception has been
  // thrown, this will return empty_byte_array.
  inline ByteArray SourcePositionTableIfCollected() const;
  inline bool HasSourcePositionTable() const;
  inline bool DidSourcePositionGenerationFail() const;
  inline void ClearFrameCacheFromSourcePositionTable();

  // Indicates that an attempt was made to collect source positions, but that it
  // failed most likely due to stack exhaustion. When in this state
  // |SourcePositionTable| will return an empty byte array rather than crashing
  // as it would if no attempt was ever made to collect source positions.
  inline void SetSourcePositionsFailedToCollect();

  DECL_CAST(BytecodeArray)

  // Dispatched behavior.
  inline int BytecodeArraySize();

  inline int raw_instruction_size();

  // Returns the size of bytecode and its metadata. This includes the size of
  // bytecode, constant pool, source position table, and handler table.
  inline int SizeIncludingMetadata();

  DECL_PRINTER(BytecodeArray)
  DECL_VERIFIER(BytecodeArray)

  V8_EXPORT_PRIVATE void Disassemble(std::ostream& os);

  void CopyBytecodesTo(BytecodeArray to);

  // Bytecode aging
  V8_EXPORT_PRIVATE bool IsOld() const;
  V8_EXPORT_PRIVATE void MakeOlder();

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

  // Compares only the bytecode array but not any of the header fields.
  bool IsBytecodeEqual(const BytecodeArray other) const;

// Layout description.
#define BYTECODE_ARRAY_FIELDS(V)                           \
  /* Pointer fields. */                                    \
  V(kConstantPoolOffset, kTaggedSize)                      \
  V(kHandlerTableOffset, kTaggedSize)                      \
  V(kSourcePositionTableOffset, kTaggedSize)               \
  V(kFrameSizeOffset, kIntSize)                            \
  V(kParameterSizeOffset, kIntSize)                        \
  V(kIncomingNewTargetOrGeneratorRegisterOffset, kIntSize) \
  V(kOSRNestingLevelOffset, kCharSize)                     \
  V(kBytecodeAgeOffset, kCharSize)                         \
  /* Total size. */                                        \
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(FixedArrayBase::kHeaderSize,
                                BYTECODE_ARRAY_FIELDS)
#undef BYTECODE_ARRAY_FIELDS

  // InterpreterEntryTrampoline expects these fields to be next to each other
  // and writes a 16-bit value to reset them.
  STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
                kOSRNestingLevelOffset + kCharSize);

  // Maximal memory consumption for a single BytecodeArray.
  static const int kMaxSize = 512 * MB;
  // Maximal length of a single BytecodeArray.
  static const int kMaxLength = kMaxSize - kHeaderSize;

  class BodyDescriptor;

  OBJECT_CONSTRUCTORS(BytecodeArray, FixedArrayBase);
};

// DeoptimizationData is a fixed array used to hold the deoptimization data for
// optimized code.  It also contains information about functions that were
// inlined.  If N different functions were inlined then the first N elements of
// the literal array will contain these functions.
//
// It can be empty.
class DeoptimizationData : public FixedArray {
 public:
  // Layout description.  Indices in the array.
  static const int kTranslationByteArrayIndex = 0;
  static const int kInlinedFunctionCountIndex = 1;
  static const int kLiteralArrayIndex = 2;
  static const int kOsrBytecodeOffsetIndex = 3;
  static const int kOsrPcOffsetIndex = 4;
  static const int kOptimizationIdIndex = 5;
  static const int kSharedFunctionInfoIndex = 6;
  static const int kInliningPositionsIndex = 7;
  static const int kFirstDeoptEntryIndex = 8;

  // Offsets of deopt entry elements relative to the start of the entry.
  static const int kBytecodeOffsetRawOffset = 0;
  static const int kTranslationIndexOffset = 1;
  static const int kPcOffset = 2;
  static const int kDeoptEntrySize = 3;

// Simple element accessors.
#define DECL_ELEMENT_ACCESSORS(name, type) \
  inline type name() const;                \
  inline void Set##name(type value);

  DECL_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
  DECL_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
  DECL_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
  DECL_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
  DECL_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
  DECL_ELEMENT_ACCESSORS(OptimizationId, Smi)
  DECL_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
  DECL_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)

#undef DECL_ELEMENT_ACCESSORS

// Accessors for elements of the ith deoptimization entry.
#define DECL_ENTRY_ACCESSORS(name, type) \
  inline type name(int i) const;         \
  inline void Set##name(int i, type value);

  DECL_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
  DECL_ENTRY_ACCESSORS(TranslationIndex, Smi)
  DECL_ENTRY_ACCESSORS(Pc, Smi)

#undef DECL_ENTRY_ACCESSORS

  inline BailoutId BytecodeOffset(int i);

  inline void SetBytecodeOffset(int i, BailoutId value);

  inline int DeoptCount();

  static const int kNotInlinedIndex = -1;

  // Returns the inlined function at the given position in LiteralArray, or the
  // outer function if index == kNotInlinedIndex.
  class SharedFunctionInfo GetInlinedFunction(int index);

  // Allocates a DeoptimizationData.
  static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
                                        AllocationType allocation);

  // Return an empty DeoptimizationData.
  V8_EXPORT_PRIVATE static Handle<DeoptimizationData> Empty(Isolate* isolate);

  DECL_CAST(DeoptimizationData)

#ifdef ENABLE_DISASSEMBLER
  void DeoptimizationDataPrint(std::ostream& os);  // NOLINT
#endif

 private:
  static int IndexForEntry(int i) {
    return kFirstDeoptEntryIndex + (i * kDeoptEntrySize);
  }

  static int LengthFor(int entry_count) { return IndexForEntry(entry_count); }
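  // Worked example of the index arithmetic above: with two deopt entries,
  // entry 0 occupies indices 8..10 (BytecodeOffsetRaw, TranslationIndex, Pc),
  // entry 1 occupies indices 11..13, and LengthFor(2) == IndexForEntry(2),
  // i.e. 14.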

  OBJECT_CONSTRUCTORS(DeoptimizationData, FixedArray);
};

class SourcePositionTableWithFrameCache : public Struct {
 public:
  DECL_ACCESSORS(source_position_table, ByteArray)
  DECL_ACCESSORS(stack_frame_cache, SimpleNumberDictionary)

  DECL_CAST(SourcePositionTableWithFrameCache)

  DECL_PRINTER(SourcePositionTableWithFrameCache)
  DECL_VERIFIER(SourcePositionTableWithFrameCache)

  // Layout description.
  DEFINE_FIELD_OFFSET_CONSTANTS(
    Struct::kHeaderSize,
    TORQUE_GENERATED_SOURCE_POSITION_TABLE_WITH_FRAME_CACHE_FIELDS)

  OBJECT_CONSTRUCTORS(SourcePositionTableWithFrameCache, Struct);
};

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_CODE_H_