// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_CODE_H_
#define V8_OBJECTS_CODE_H_

#include "src/base/bit-field.h"
#include "src/codegen/handler-table.h"
#include "src/objects/contexts.h"
#include "src/objects/fixed-array.h"
#include "src/objects/heap-object.h"
#include "src/objects/objects.h"
#include "src/objects/struct.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

// Forward declarations for types referenced only by pointer/reference below.
class ByteArray;
class BytecodeArray;
class CodeDataContainer;
class CodeDesc;

namespace interpreter {
class Register;
}  // namespace interpreter
// Code describes objects with on-the-fly generated machine code.
32
class Code : public HeapObject {
33
 public:
34
  NEVER_READ_ONLY_SPACE
35 36
  // Opaque data type for encapsulating code flags like kind, inline
  // cache state, and arguments count.
37
  using Flags = uint32_t;
38 39 40 41 42 43 44 45

#define CODE_KIND_LIST(V)   \
  V(OPTIMIZED_FUNCTION)     \
  V(BYTECODE_HANDLER)       \
  V(STUB)                   \
  V(BUILTIN)                \
  V(REGEXP)                 \
  V(WASM_FUNCTION)          \
46
  V(WASM_TO_CAPI_FUNCTION)  \
47 48
  V(WASM_TO_JS_FUNCTION)    \
  V(JS_TO_WASM_FUNCTION)    \
49
  V(JS_TO_JS_FUNCTION)      \
50 51 52 53 54 55 56 57 58 59 60 61
  V(C_WASM_ENTRY)

  enum Kind {
#define DEFINE_CODE_KIND_ENUM(name) name,
    CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
#undef DEFINE_CODE_KIND_ENUM
        NUMBER_OF_KINDS
  };

  static const char* Kind2String(Kind kind);

#ifdef ENABLE_DISASSEMBLER
62
  const char* GetName(Isolate* isolate) const;
63
  V8_EXPORT_PRIVATE void Disassemble(const char* name, std::ostream& os,
64
                                     Isolate* isolate,
65
                                     Address current_pc = kNullAddress);
66 67
#endif

68 69
  // [instruction_size]: Size of the native instructions, including embedded
  // data such as the safepoints table.
70 71
  inline int raw_instruction_size() const;
  inline void set_raw_instruction_size(int value);
72

73 74
  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
75 76
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
77
  // at instruction_start.
78
  inline int InstructionSize() const;
79
  V8_EXPORT_PRIVATE int OffHeapInstructionSize() const;
80

81
  // [relocation_info]: Code relocation information
82
  DECL_ACCESSORS(relocation_info, ByteArray)
83 84 85

  // This function should be called only from GC.
  void ClearEmbeddedObjects(Heap* heap);
86 87

  // [deoptimization_data]: Array containing data for deopt.
88
  DECL_ACCESSORS(deoptimization_data, FixedArray)
89

90
  // [source_position_table]: ByteArray for the source positions table.
91
  DECL_ACCESSORS(source_position_table, Object)
92
  inline ByteArray SourcePositionTable() const;
93
  inline ByteArray SourcePositionTableIfCollected() const;
94

95
  // [code_data_container]: A container indirection for all mutable fields.
96
  DECL_ACCESSORS(code_data_container, CodeDataContainer)
97

98
  // [next_code_link]: Link for lists of optimized or deoptimized code.
99
  // Note that this field is stored in the {CodeDataContainer} to be mutable.
100 101
  inline Object next_code_link() const;
  inline void set_next_code_link(Object value);
102 103

  // Unchecked accessors to be used during GC.
104
  inline ByteArray unchecked_relocation_info() const;
105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120

  inline int relocation_size() const;

  // [kind]: Access to specific code kind.
  inline Kind kind() const;

  inline bool is_optimized_code() const;
  inline bool is_wasm_code() const;

  // Testers for interpreter builtins.
  inline bool is_interpreter_trampoline_builtin() const;

  // Tells whether the code checks the optimization marker in the function's
  // feedback vector.
  inline bool checks_optimization_marker() const;

121
  // Tells whether the outgoing parameters of this code are tagged pointers.
122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139
  inline bool has_tagged_params() const;

  // [is_turbofanned]: For kind STUB or OPTIMIZED_FUNCTION, tells whether the
  // code object was generated by the TurboFan optimizing compiler.
  inline bool is_turbofanned() const;

  // [can_have_weak_objects]: For kind OPTIMIZED_FUNCTION, tells whether the
  // embedded objects in code should be treated weakly.
  inline bool can_have_weak_objects() const;
  inline void set_can_have_weak_objects(bool value);

  // [builtin_index]: For builtins, tells which builtin index the code object
  // has. The builtin index is a non-negative integer for builtins, and -1
  // otherwise.
  inline int builtin_index() const;
  inline void set_builtin_index(int id);
  inline bool is_builtin() const;

140 141 142
  inline unsigned inlined_bytecode_size() const;
  inline void set_inlined_bytecode_size(unsigned size);

143 144 145
  inline bool has_safepoint_info() const;

  // [stack_slots]: If {has_safepoint_info()}, the number of stack slots
146
  // reserved in the code prologue.
147
  inline int stack_slots() const;
148

149 150
  // [safepoint_table_offset]: If {has_safepoint_info()}, the offset in the
  // instruction stream where the safepoint table starts.
151 152
  inline int safepoint_table_offset() const;
  inline void set_safepoint_table_offset(int offset);
153
  Address SafepointTableAddress() const;
154 155
  int safepoint_table_size() const;
  bool has_safepoint_table() const;
156

157 158 159 160
  // [handler_table_offset]: The offset in the instruction stream where the
  // exception handler table starts.
  inline int handler_table_offset() const;
  inline void set_handler_table_offset(int offset);
161
  Address HandlerTableAddress() const;
162 163 164 165 166 167 168 169 170 171 172 173 174 175
  int handler_table_size() const;
  bool has_handler_table() const;

  // [constant_pool offset]: Offset of the constant pool.
  // Valid for FLAG_enable_embedded_constant_pool only
  inline int constant_pool_offset() const;
  inline void set_constant_pool_offset(int offset);
  int constant_pool_size() const;
  bool has_constant_pool() const;

  // [code_comments_offset]: Offset of the code comment section.
  inline int code_comments_offset() const;
  inline void set_code_comments_offset(int offset);
  inline Address code_comments() const;
176
  V8_EXPORT_PRIVATE int code_comments_size() const;
177
  V8_EXPORT_PRIVATE bool has_code_comments() const;
178 179 180

  // The size of the executable instruction area, without embedded metadata.
  int ExecutableInstructionSize() const;
181

182
  // [marked_for_deoptimization]: For kind OPTIMIZED_FUNCTION tells whether
183
  // the code is going to be deoptimized.
184 185 186
  inline bool marked_for_deoptimization() const;
  inline void set_marked_for_deoptimization(bool flag);

187 188 189 190 191 192
  // [deoptimzation_count]: In turboprop we retain the deoptimized code on soft
  // deopts for a certain number of soft deopts. This field keeps track of
  // number of deoptimizations we have seen so far.
  inline int deoptimization_count() const;
  inline void increment_deoptimization_count();

193 194 195 196 197 198
  // [embedded_objects_cleared]: For kind OPTIMIZED_FUNCTION tells whether
  // the embedded objects in the code marked for deoptimization were cleared.
  // Note that embedded_objects_cleared() implies marked_for_deoptimization().
  inline bool embedded_objects_cleared() const;
  inline void set_embedded_objects_cleared(bool flag);

199 200 201 202 203 204 205 206 207 208 209 210 211 212 213 214 215
  // [deopt_already_counted]: For kind OPTIMIZED_FUNCTION tells whether
  // the code was already deoptimized.
  inline bool deopt_already_counted() const;
  inline void set_deopt_already_counted(bool flag);

  // [is_promise_rejection]: For kind BUILTIN tells whether the
  // exception thrown by the code will lead to promise rejection or
  // uncaught if both this and is_exception_caught is set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_promise_rejection(bool flag);

  // [is_exception_caught]: For kind BUILTIN tells whether the
  // exception thrown by the code will be caught internally or
  // uncaught if both this and is_promise_rejection is set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_exception_caught(bool flag);

216 217 218 219
  // [is_off_heap_trampoline]: For kind BUILTIN tells whether
  // this is a trampoline to an off-heap builtin.
  inline bool is_off_heap_trampoline() const;

220
  // [constant_pool]: The constant pool for this function.
221
  inline Address constant_pool() const;
222 223 224 225 226 227 228 229 230 231 232 233 234 235

  // Get the safepoint entry for the given pc.
  SafepointEntry GetSafepointEntry(Address pc);

  // The entire code object including its header is copied verbatim to the
  // snapshot so that it can be written in one, fast, memcpy during
  // deserialization. The deserializer will overwrite some pointers, rather
  // like a runtime linker, but the random allocation addresses used in the
  // mksnapshot process would still be present in the unlinked snapshot data,
  // which would make snapshot production non-reproducible. This method wipes
  // out the to-be-overwritten header data for reproducible snapshots.
  inline void WipeOutHeader();

  // Clear uninitialized padding space. This ensures that the snapshot content
236
  // is deterministic. Depending on the V8 build mode there could be no padding.
237 238 239
  inline void clear_padding();
  // Initialize the flags field. Similar to clear_padding above this ensure that
  // the snapshot content is deterministic.
240
  inline void initialize_flags(Kind kind, bool has_unwinding_info,
241 242
                               bool is_turbofanned, int stack_slots,
                               bool is_off_heap_trampoline);
243 244

  // Convert a target address into a code object.
245
  static inline Code GetCodeFromTargetAddress(Address address);
246 247

  // Convert an entry address into an object.
248
  static inline Code GetObjectFromEntryAddress(Address location_of_address);
249 250

  // Returns the address of the first instruction.
251
  inline Address raw_instruction_start() const;
252

253 254 255
  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
256
  inline Address InstructionStart() const;
257
  V8_EXPORT_PRIVATE Address OffHeapInstructionStart() const;
258

259
  // Returns the address right after the last instruction.
260
  inline Address raw_instruction_end() const;
261

262 263 264
  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
265
  inline Address InstructionEnd() const;
266
  V8_EXPORT_PRIVATE Address OffHeapInstructionEnd() const;
267

268 269 270 271 272 273 274 275 276 277 278
  // Returns the size of the instructions, padding, relocation and unwinding
  // information.
  inline int body_size() const;

  // Returns the size of code and its metadata. This includes the size of code
  // relocation information, deoptimization data and handler table.
  inline int SizeIncludingMetadata() const;

  // Returns the address of the first relocation info (read backwards!).
  inline byte* relocation_start() const;

279 280 281
  // Returns the address right after the relocation info (read backwards!).
  inline byte* relocation_end() const;

282 283 284 285 286
  // [has_unwinding_info]: Whether this code object has unwinding information.
  // If it doesn't, unwinding_information_start() will point to invalid data.
  //
  // The body of all code objects has the following layout.
  //
287
  //  +--------------------------+  <-- raw_instruction_start()
288 289 290
  //  |       instructions       |
  //  |           ...            |
  //  +--------------------------+
291 292 293 294 295
  //  |     embedded metadata    |  <-- safepoint_table_offset()
  //  |           ...            |  <-- handler_table_offset()
  //  |                          |  <-- constant_pool_offset()
  //  |                          |  <-- code_comments_offset()
  //  |                          |
296
  //  +--------------------------+  <-- raw_instruction_end()
297
  //
298
  // If has_unwinding_info() is false, raw_instruction_end() points to the first
299 300 301 302 303 304
  // memory location after the end of the code object. Otherwise, the body
  // continues as follows:
  //
  //  +--------------------------+
  //  |    padding to the next   |
  //  |  8-byte aligned address  |
305
  //  +--------------------------+  <-- raw_instruction_end()
306 307 308 309 310 311 312 313 314 315
  //  |   [unwinding_info_size]  |
  //  |        as uint64_t       |
  //  +--------------------------+  <-- unwinding_info_start()
  //  |       unwinding info     |
  //  |            ...           |
  //  +--------------------------+  <-- unwinding_info_end()
  //
  // and unwinding_info_end() points to the first memory location after the end
  // of the code object.
  //
316
  inline bool has_unwinding_info() const;
317 318 319 320 321 322

  // [unwinding_info_size]: Size of the unwinding information.
  inline int unwinding_info_size() const;
  inline void set_unwinding_info_size(int value);

  // Returns the address of the unwinding information, if any.
323
  inline Address unwinding_info_start() const;
324 325

  // Returns the address right after the end of the unwinding information.
326
  inline Address unwinding_info_end() const;
327 328

  // Code entry point.
329
  inline Address entry() const;
330 331

  // Returns true if pc is inside this object's instructions.
332
  inline bool contains(Address pc);
333 334 335 336 337

  // Relocate the code by delta bytes. Called to signal that this code
  // object has been moved by delta bytes.
  void Relocate(intptr_t delta);

338
  // Migrate code from desc without flushing the instruction cache.
339
  void CopyFromNoFlush(Heap* heap, const CodeDesc& desc);
340

341 342
  // Copy the RelocInfo portion of |desc| to |dest|. The ByteArray must be
  // exactly the same size as the RelocInfo in |desc|.
343
  static inline void CopyRelocInfoToByteArray(ByteArray dest,
344 345
                                              const CodeDesc& desc);

346
  // Flushes the instruction cache for the executable instructions of this code
347
  // object. Make sure to call this while the code is still writable.
348 349
  void FlushICache() const;

350 351 352 353 354 355 356 357 358 359
  // Returns the object size for a given body (used for allocation).
  static int SizeFor(int body_size) {
    DCHECK_SIZE_TAG_ALIGNED(body_size);
    return RoundUp(kHeaderSize + body_size, kCodeAlignment);
  }

  // Calculate the size of the code object to report for log events. This takes
  // the layout of the code object into account.
  inline int ExecutableSize() const;

360
  DECL_CAST(Code)
361 362 363 364 365 366 367

  // Dispatched behavior.
  inline int CodeSize() const;

  DECL_PRINTER(Code)
  DECL_VERIFIER(Code)

368
  void PrintDeoptLocation(FILE* out, const char* str, Address pc);
369 370
  bool CanDeoptAt(Address pc);

371 372
  void SetMarkedForDeoptimization(const char* reason);

373 374
  inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();

375
  bool IsIsolateIndependent(Isolate* isolate);
376
  bool IsNativeContextIndependent(Isolate* isolate);
377

378 379
  inline bool CanContainWeakObjects();

380
  inline bool IsWeakObject(HeapObject object);
381

382
  static inline bool IsWeakObjectInOptimizedCode(HeapObject object);
383

384 385 386 387 388
  // Returns false if this is an embedded builtin Code object that's in
  // read_only_space and hence doesn't have execute permissions.
  inline bool IsExecutable();

  // Returns true if the function is inlined in the code.
389
  bool Inlines(SharedFunctionInfo sfi);
390

391
  class OptimizedCodeIterator;
392 393

  // Layout description.
394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410
#define CODE_FIELDS(V)                                                    \
  V(kRelocationInfoOffset, kTaggedSize)                                   \
  V(kDeoptimizationDataOffset, kTaggedSize)                               \
  V(kSourcePositionTableOffset, kTaggedSize)                              \
  V(kCodeDataContainerOffset, kTaggedSize)                                \
  /* Data or code not directly visited by GC directly starts here. */     \
  /* The serializer needs to copy bytes starting from here verbatim. */   \
  /* Objects embedded into code is visited via reloc info. */             \
  V(kDataStart, 0)                                                        \
  V(kInstructionSizeOffset, kIntSize)                                     \
  V(kFlagsOffset, kIntSize)                                               \
  V(kSafepointTableOffsetOffset, kIntSize)                                \
  V(kHandlerTableOffsetOffset, kIntSize)                                  \
  V(kConstantPoolOffsetOffset,                                            \
    FLAG_enable_embedded_constant_pool ? kIntSize : 0)                    \
  V(kCodeCommentsOffsetOffset, kIntSize)                                  \
  V(kBuiltinIndexOffset, kIntSize)                                        \
411
  V(kInlinedBytecodeSizeOffset, kIntSize)                                 \
412 413 414 415
  V(kUnalignedHeaderSize, 0)                                              \
  /* Add padding to align the instruction start following right after */  \
  /* the Code object header. */                                           \
  V(kOptionalPaddingOffset, CODE_POINTER_PADDING(kOptionalPaddingOffset)) \
416 417 418 419
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
#undef CODE_FIELDS
420

421 422 423
  // This documents the amount of free space we have in each Code object header
  // due to padding for code alignment.
#if V8_TARGET_ARCH_ARM64
424
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 16 : 28;
425
#elif V8_TARGET_ARCH_MIPS64
426
  static constexpr int kHeaderPaddingSize = 28;
427
#elif V8_TARGET_ARCH_X64
428
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 16 : 28;
429
#elif V8_TARGET_ARCH_ARM
430
  static constexpr int kHeaderPaddingSize = 16;
431
#elif V8_TARGET_ARCH_IA32
432
  static constexpr int kHeaderPaddingSize = 16;
433
#elif V8_TARGET_ARCH_MIPS
434
  static constexpr int kHeaderPaddingSize = 16;
435
#elif V8_TARGET_ARCH_PPC64
436
  static constexpr int kHeaderPaddingSize =
437 438
      FLAG_enable_embedded_constant_pool ? (COMPRESS_POINTERS_BOOL ? 12 : 24)
                                         : (COMPRESS_POINTERS_BOOL ? 16 : 28);
439
#elif V8_TARGET_ARCH_S390X
440
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 16 : 28;
441 442 443
#else
#error Unknown architecture.
#endif
444
  STATIC_ASSERT(FIELD_SIZE(kOptionalPaddingOffset) == kHeaderPaddingSize);
445

446 447 448 449
  inline int GetUnwindingInfoSizeOffset() const;

  class BodyDescriptor;

450
  // Flags layout.  base::BitField<type, shift, size>.
451 452 453 454
#define CODE_FLAGS_BIT_FIELDS(V, _)    \
  V(HasUnwindingInfoField, bool, 1, _) \
  V(KindField, Kind, 5, _)             \
  V(IsTurbofannedField, bool, 1, _)    \
455 456
  V(StackSlotsField, int, 24, _)       \
  V(IsOffHeapTrampoline, bool, 1, _)
457 458
  DEFINE_BIT_FIELDS(CODE_FLAGS_BIT_FIELDS)
#undef CODE_FLAGS_BIT_FIELDS
459
  static_assert(NUMBER_OF_KINDS <= KindField::kMax, "Code::KindField size");
460
  static_assert(IsOffHeapTrampoline::kLastUsedBit < 32,
461
                "Code::flags field exhausted");
462

463 464 465
  // KindSpecificFlags layout (STUB, BUILTIN and OPTIMIZED_FUNCTION)
#define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
  V(MarkedForDeoptimizationField, bool, 1, _)     \
466
  V(EmbeddedObjectsClearedField, bool, 1, _)      \
467 468 469
  V(DeoptAlreadyCountedField, bool, 1, _)         \
  V(CanHaveWeakObjectsField, bool, 1, _)          \
  V(IsPromiseRejectionField, bool, 1, _)          \
470 471
  V(IsExceptionCaughtField, bool, 1, _)           \
  V(DeoptCountField, int, 4, _)
472 473
  DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)
#undef CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS
474
  static_assert(DeoptCountField::kLastUsedBit < 32, "KindSpecificFlags full");
475 476 477 478

  // The {marked_for_deoptimization} field is accessed from generated code.
  static const int kMarkedForDeoptimizationBit =
      MarkedForDeoptimizationField::kShift;
479 480

  static const int kArgumentsBits = 16;
481 482
  // Reserve one argument count value as the "don't adapt arguments" sentinel.
  static const int kMaxArguments = (1 << kArgumentsBits) - 2;
483 484 485 486 487 488 489

 private:
  friend class RelocIterator;

  bool is_promise_rejection() const;
  bool is_exception_caught() const;

490
  OBJECT_CONSTRUCTORS(Code, HeapObject);
491 492 493 494 495 496 497 498
};

// Iterates over all optimized {Code} objects, walking the optimized-code list
// of each native context in the isolate.
class Code::OptimizedCodeIterator {
 public:
  explicit OptimizedCodeIterator(Isolate* isolate);
  // Returns the next optimized Code object, or an empty handle-equivalent
  // value when iteration is exhausted.
  Code Next();

 private:
  NativeContext next_context_;
  Code current_code_;
  Isolate* isolate_;

  // Raw Code/Context members must not be moved by GC while iterating.
  DISALLOW_HEAP_ALLOCATION(no_gc)
  DISALLOW_COPY_AND_ASSIGN(OptimizedCodeIterator);
};

507 508 509 510 511
// CodeDataContainer is a container for all mutable fields associated with its
// referencing {Code} object. Since {Code} objects reside on write-protected
// pages within the heap, its header fields need to be immutable. There always
// is a 1-to-1 relation between {Code} and {CodeDataContainer}, the referencing
// field {Code::code_data_container} itself is immutable.
512
class CodeDataContainer : public HeapObject {
513
 public:
514
  NEVER_READ_ONLY_SPACE
515
  DECL_ACCESSORS(next_code_link, Object)
516 517 518 519 520 521
  DECL_INT_ACCESSORS(kind_specific_flags)

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

522
  DECL_CAST(CodeDataContainer)
523 524 525 526 527

  // Dispatched behavior.
  DECL_PRINTER(CodeDataContainer)
  DECL_VERIFIER(CodeDataContainer)

528 529 530 531 532 533 534 535 536 537 538 539 540 541
// Layout description.
#define CODE_DATA_FIELDS(V)                                 \
  /* Weak pointer fields. */                                \
  V(kPointerFieldsStrongEndOffset, 0)                       \
  V(kNextCodeLinkOffset, kTaggedSize)                       \
  V(kPointerFieldsWeakEndOffset, 0)                         \
  /* Raw data fields. */                                    \
  V(kKindSpecificFlagsOffset, kIntSize)                     \
  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize)) \
  /* Total size. */                                         \
  V(kSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
#undef CODE_DATA_FIELDS
542

543
  class BodyDescriptor;
544

545
  OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
546 547
};

548
class AbstractCode : public HeapObject {
549
 public:
550
  NEVER_READ_ONLY_SPACE
551 552 553 554 555 556 557 558 559 560 561 562 563 564 565
  // All code kinds and INTERPRETED_FUNCTION.
  enum Kind {
#define DEFINE_CODE_KIND_ENUM(name) name,
    CODE_KIND_LIST(DEFINE_CODE_KIND_ENUM)
#undef DEFINE_CODE_KIND_ENUM
        INTERPRETED_FUNCTION,
    NUMBER_OF_KINDS
  };

  static const char* Kind2String(Kind kind);

  int SourcePosition(int offset);
  int SourceStatementPosition(int offset);

  // Returns the address of the first instruction.
566
  inline Address raw_instruction_start();
567

568 569 570 571 572
  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
  inline Address InstructionStart();

573
  // Returns the address right after the last instruction.
574
  inline Address raw_instruction_end();
575

576 577 578 579 580
  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
  inline Address InstructionEnd();

581
  // Returns the size of the code instructions.
582
  inline int raw_instruction_size();
583

584 585
  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
586 587
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
588 589 590
  // at instruction_start.
  inline int InstructionSize();

591
  // Return the source position table.
592
  inline ByteArray source_position_table();
593 594 595 596 597 598 599

  void DropStackFrameCache();

  // Returns the size of instructions and the metadata.
  inline int SizeIncludingMetadata();

  // Returns true if pc is inside this object's instructions.
600
  inline bool contains(Address pc);
601 602 603 604 605 606 607 608

  // Returns the AbstractCode::Kind of the code.
  inline Kind kind();

  // Calculate the size of the code object to report for log events. This takes
  // the layout of the code object into account.
  inline int ExecutableSize();

609
  DECL_CAST(AbstractCode)
610
  inline Code GetCode();
611
  inline BytecodeArray GetBytecodeArray();
612 613 614 615

  // Max loop nesting marker used to postpose OSR. We don't take loop
  // nesting that is deeper than 5 levels into account.
  static const int kMaxLoopNestingMarker = 6;
616

617
  OBJECT_CONSTRUCTORS(AbstractCode, HeapObject);
618 619
};

620 621 622 623
// Dependent code is a singly linked list of weak fixed arrays. Each array
// contains weak pointers to code objects for one dependent group. The suffix of
// the array can be filled with the undefined value if the number of codes is
// less than the length of the array.
624 625 626 627 628 629 630 631 632 633 634
//
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
// | next | count & group 1 | code 1 | code 2 | ... | code n | undefined | ... |
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
//    |
//    V
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
// | next | count & group 2 | code 1 | code 2 | ... | code m | undefined | ... |
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
//    |
//    V
635
// empty_weak_fixed_array()
636
//
637
// The list of weak fixed arrays is ordered by dependency groups.
638

639
class DependentCode : public WeakFixedArray {
640
 public:
641
  DECL_CAST(DependentCode)
642

643 644 645 646 647 648 649 650 651 652 653 654 655 656
  enum DependencyGroup {
    // Group of code that embed a transition to this map, and depend on being
    // deoptimized when the transition is replaced by a new version.
    kTransitionGroup,
    // Group of code that omit run-time prototype checks for prototypes
    // described by this map. The group is deoptimized whenever an object
    // described by this map changes shape (and transitions to a new map),
    // possibly invalidating the assumptions embedded in the code.
    kPrototypeCheckGroup,
    // Group of code that depends on global property values in property cells
    // not being changed.
    kPropertyCellChangedGroup,
    // Group of code that omit run-time checks for field(s) introduced by
    // this map, i.e. for the field type.
657 658 659
    kFieldTypeGroup,
    kFieldConstGroup,
    kFieldRepresentationGroup,
660 661 662 663 664 665 666 667 668 669 670
    // Group of code that omit run-time type checks for initial maps of
    // constructors.
    kInitialMapChangedGroup,
    // Group of code that depends on tenuring information in AllocationSites
    // not being changed.
    kAllocationSiteTenuringChangedGroup,
    // Group of code that depends on element transition information in
    // AllocationSites not being changed.
    kAllocationSiteTransitionChangedGroup
  };

671
  // Register a code dependency of {cell} on {object}.
672 673 674 675
  V8_EXPORT_PRIVATE static void InstallDependency(Isolate* isolate,
                                                  const MaybeObjectHandle& code,
                                                  Handle<HeapObject> object,
                                                  DependencyGroup group);
676

677
  void DeoptimizeDependentCodeGroup(DependencyGroup group);
678

679
  bool MarkCodeForDeoptimization(DependencyGroup group);
680

681
  // The following low-level accessors are exposed only for tests.
682
  inline DependencyGroup group();
683
  inline MaybeObject object_at(int i);
684
  inline int count();
685
  inline DependentCode next_link();
686

687
 private:
688 689
  static const char* DependencyGroupName(DependencyGroup group);

690
  // Get/Set {object}'s {DependentCode}.
691
  static DependentCode GetDependentCode(Handle<HeapObject> object);
692 693
  static void SetDependentCode(Handle<HeapObject> object,
                               Handle<DependentCode> dep);
694

695
  static Handle<DependentCode> New(Isolate* isolate, DependencyGroup group,
696
                                   const MaybeObjectHandle& object,
697
                                   Handle<DependentCode> next);
698 699 700 701
  static Handle<DependentCode> EnsureSpace(Isolate* isolate,
                                           Handle<DependentCode> entries);
  static Handle<DependentCode> InsertWeakCode(Isolate* isolate,
                                              Handle<DependentCode> entries,
702
                                              DependencyGroup group,
703
                                              const MaybeObjectHandle& code);
704

705 706 707
  // Compact by removing cleared weak cells and return true if there was
  // any cleared weak cell.
  bool Compact();
708

709 710 711 712
  static int Grow(int number_of_entries) {
    if (number_of_entries < 5) return number_of_entries + 1;
    return number_of_entries * 5 / 4;
  }
713 714 715 716 717 718

  static const int kGroupCount = kAllocationSiteTransitionChangedGroup + 1;
  static const int kNextLinkIndex = 0;
  static const int kFlagsIndex = 1;
  static const int kCodesStartIndex = 2;

719
  inline void set_next_link(DependentCode next);
720
  inline void set_count(int value);
721
  inline void set_object_at(int i, MaybeObject object);
722 723 724
  inline void clear_at(int i);
  inline void copy(int from, int to);

725 726
  inline int flags();
  inline void set_flags(int flags);
727 728
  using GroupField = base::BitField<int, 0, 5>;
  using CountField = base::BitField<int, 5, 27>;
729
  STATIC_ASSERT(kGroupCount <= GroupField::kMax + 1);
730

731
  OBJECT_CONSTRUCTORS(DependentCode, WeakFixedArray);
732 733 734
};

// BytecodeArray represents a sequence of interpreter bytecodes.
735
class BytecodeArray : public FixedArrayBase {
736 737 738 739 740 741 742 743 744 745 746 747 748 749 750
 public:
  enum Age {
    kNoAgeBytecodeAge = 0,
    kQuadragenarianBytecodeAge,
    kQuinquagenarianBytecodeAge,
    kSexagenarianBytecodeAge,
    kSeptuagenarianBytecodeAge,
    kOctogenarianBytecodeAge,
    kAfterLastBytecodeAge,
    kFirstBytecodeAge = kNoAgeBytecodeAge,
    kLastBytecodeAge = kAfterLastBytecodeAge - 1,
    kBytecodeAgeCount = kAfterLastBytecodeAge - kFirstBytecodeAge - 1,
    kIsOldBytecodeAge = kSexagenarianBytecodeAge
  };

751
  static constexpr int SizeFor(int length) {
752 753 754 755
    return OBJECT_POINTER_ALIGN(kHeaderSize + length);
  }

  // Setter and getter
756
  inline byte get(int index) const;
757 758 759 760 761 762
  inline void set(int index, byte value);

  // Returns data start address.
  inline Address GetFirstBytecodeAddress();

  // Accessors for frame size.
763 764
  inline int32_t frame_size() const;
  inline void set_frame_size(int32_t frame_size);
765 766 767 768 769

  // Accessor for register count (derived from frame_size).
  inline int register_count() const;

  // Accessors for parameter count (including implicit 'this' receiver).
770 771
  inline int32_t parameter_count() const;
  inline void set_parameter_count(int32_t number_of_parameters);
772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788

  // Register used to pass the incoming new.target or generator object from the
  // fucntion call.
  inline interpreter::Register incoming_new_target_or_generator_register()
      const;
  inline void set_incoming_new_target_or_generator_register(
      interpreter::Register incoming_new_target_or_generator_register);

  // Accessors for OSR loop nesting level.
  inline int osr_loop_nesting_level() const;
  inline void set_osr_loop_nesting_level(int depth);

  // Accessors for bytecode's code age.
  inline Age bytecode_age() const;
  inline void set_bytecode_age(Age age);

  // Accessors for the constant pool.
789
  DECL_ACCESSORS(constant_pool, FixedArray)
790 791

  // Accessors for handler table containing offsets of exception handlers.
792
  DECL_ACCESSORS(handler_table, ByteArray)
793

794 795 796 797 798 799 800
  // Accessors for source position table. Can contain:
  // * undefined (initial value)
  // * empty_byte_array (for bytecode generated for functions that will never
  // have source positions, e.g. native functions).
  // * ByteArray (when source positions have been collected for the bytecode)
  // * exception (when an error occurred while explicitly collecting source
  // positions for pre-existing bytecode).
801 802
  DECL_ACCESSORS(source_position_table, Object)

803 804 805 806 807 808 809 810 811
  // This must only be called if source position collection has already been
  // attempted. (If it failed because of an exception then it will return
  // empty_byte_array).
  inline ByteArray SourcePositionTable() const;
  // If source positions have not been collected or an exception has been thrown
  // this will return empty_byte_array.
  inline ByteArray SourcePositionTableIfCollected() const;
  inline bool HasSourcePositionTable() const;
  inline bool DidSourcePositionGenerationFail() const;
812

813 814 815 816 817 818
  // Indicates that an attempt was made to collect source positions, but that it
  // failed most likely due to stack exhaustion. When in this state
  // |SourcePositionTable| will return an empty byte array rather than crashing
  // as it would if no attempt was ever made to collect source positions.
  inline void SetSourcePositionsFailedToCollect();

819
  DECL_CAST(BytecodeArray)
820 821 822 823

  // Dispatched behavior.
  inline int BytecodeArraySize();

824
  inline int raw_instruction_size();
825 826 827 828 829

  // Returns the size of bytecode and its metadata. This includes the size of
  // bytecode, constant pool, source position table, and handler table.
  inline int SizeIncludingMetadata();

830
  DECL_PRINTER(BytecodeArray)
831 832
  DECL_VERIFIER(BytecodeArray)

833
  V8_EXPORT_PRIVATE void Disassemble(std::ostream& os);
834

835
  void CopyBytecodesTo(BytecodeArray to);
836 837

  // Bytecode aging
838 839
  V8_EXPORT_PRIVATE bool IsOld() const;
  V8_EXPORT_PRIVATE void MakeOlder();
840 841 842 843

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();
844

845
  // Layout description.
846
  DEFINE_FIELD_OFFSET_CONSTANTS(FixedArrayBase::kHeaderSize,
847
                                TORQUE_GENERATED_BYTECODE_ARRAY_FIELDS)
848

849 850 851
  // InterpreterEntryTrampoline expects these fields to be next to each other
  // and writes a 16-bit value to reset them.
  STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
852
                kOsrNestingLevelOffset + kCharSize);
853

854 855 856 857 858 859 860
  // Maximal memory consumption for a single BytecodeArray.
  static const int kMaxSize = 512 * MB;
  // Maximal length of a single BytecodeArray.
  static const int kMaxLength = kMaxSize - kHeaderSize;

  class BodyDescriptor;

861
  OBJECT_CONSTRUCTORS(BytecodeArray, FixedArrayBase);
862 863
};

864 865 866
// DeoptimizationData is a fixed array used to hold the deoptimization data for
// optimized code.  It also contains information about functions that were
// inlined.  If N different functions were inlined then the first N elements of
867 868 869
// the literal array will contain these functions.
//
// It can be empty.
870
class DeoptimizationData : public FixedArray {
871 872 873 874 875 876 877 878 879
 public:
  // Layout description.  Indices in the array.
  static const int kTranslationByteArrayIndex = 0;
  static const int kInlinedFunctionCountIndex = 1;
  static const int kLiteralArrayIndex = 2;
  static const int kOsrBytecodeOffsetIndex = 3;
  static const int kOsrPcOffsetIndex = 4;
  static const int kOptimizationIdIndex = 5;
  static const int kSharedFunctionInfoIndex = 6;
880
  static const int kInliningPositionsIndex = 7;
881
  static const int kDeoptExitStartIndex = 8;
882 883
  static const int kNonLazyDeoptCountIndex = 9;
  static const int kFirstDeoptEntryIndex = 10;
884 885 886 887 888 889 890 891 892

  // Offsets of deopt entry elements relative to the start of the entry.
  static const int kBytecodeOffsetRawOffset = 0;
  static const int kTranslationIndexOffset = 1;
  static const int kPcOffset = 2;
  static const int kDeoptEntrySize = 3;

// Simple element accessors.
#define DECL_ELEMENT_ACCESSORS(name, type) \
893
  inline type name() const;                \
894
  inline void Set##name(type value);
895

896
  DECL_ELEMENT_ACCESSORS(TranslationByteArray, ByteArray)
897
  DECL_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
898
  DECL_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
899 900 901
  DECL_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
  DECL_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
  DECL_ELEMENT_ACCESSORS(OptimizationId, Smi)
902
  DECL_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
903
  DECL_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
904
  DECL_ELEMENT_ACCESSORS(DeoptExitStart, Smi)
905
  DECL_ELEMENT_ACCESSORS(NonLazyDeoptCount, Smi)
906 907 908 909 910

#undef DECL_ELEMENT_ACCESSORS

// Accessors for elements of the ith deoptimization entry.
#define DECL_ENTRY_ACCESSORS(name, type) \
911
  inline type name(int i) const;         \
912
  inline void Set##name(int i, type value);
913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929

  DECL_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
  DECL_ENTRY_ACCESSORS(TranslationIndex, Smi)
  DECL_ENTRY_ACCESSORS(Pc, Smi)

#undef DECL_ENTRY_ACCESSORS

  inline BailoutId BytecodeOffset(int i);

  inline void SetBytecodeOffset(int i, BailoutId value);

  inline int DeoptCount();

  static const int kNotInlinedIndex = -1;

  // Returns the inlined function at the given position in LiteralArray, or the
  // outer function if index == kNotInlinedIndex.
930
  class SharedFunctionInfo GetInlinedFunction(int index);
931

932 933
  // Allocates a DeoptimizationData.
  static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
934
                                        AllocationType allocation);
935

936
  // Return an empty DeoptimizationData.
937
  V8_EXPORT_PRIVATE static Handle<DeoptimizationData> Empty(Isolate* isolate);
938

939
  DECL_CAST(DeoptimizationData)
940 941

#ifdef ENABLE_DISASSEMBLER
942
  void DeoptimizationDataPrint(std::ostream& os);  // NOLINT
943 944 945 946 947 948 949 950
#endif

 private:
  static int IndexForEntry(int i) {
    return kFirstDeoptEntryIndex + (i * kDeoptEntrySize);
  }

  static int LengthFor(int entry_count) { return IndexForEntry(entry_count); }
951

952
  OBJECT_CONSTRUCTORS(DeoptimizationData, FixedArray);
953 954 955 956 957 958 959 960
};

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_CODE_H_