// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_CODE_H_
#define V8_OBJECTS_CODE_H_

#include "src/base/bit-field.h"
#include "src/builtins/builtins.h"
#include "src/codegen/handler-table.h"
#include "src/deoptimizer/translation-array.h"
#include "src/objects/code-kind.h"
#include "src/objects/contexts.h"
#include "src/objects/fixed-array.h"
#include "src/objects/heap-object.h"
#include "src/objects/objects.h"
#include "src/objects/shared-function-info.h"
#include "src/objects/struct.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

class ByteArray;
class BytecodeArray;
class CodeDataContainer;
class CodeDesc;

namespace interpreter {
class Register;
}  // namespace interpreter

#include "torque-generated/src/objects/code-tq.inc"

// CodeDataContainer is a container for all mutable fields associated with its
// referencing {Code} object. Since {Code} objects reside on write-protected
// pages within the heap, its header fields need to be immutable. There always
// is a 1-to-1 relation between {Code} and {CodeDataContainer}, the referencing
// field {Code::code_data_container} itself is immutable.
class CodeDataContainer : public HeapObject {
 public:
  NEVER_READ_ONLY_SPACE
  DECL_ACCESSORS(next_code_link, Object)
46
  DECL_RELAXED_INT32_ACCESSORS(kind_specific_flags)
47 48 49 50 51

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

52 53 54
  // Back-reference to the Code object.
  // Available only when V8_EXTERNAL_CODE_SPACE is defined.
  DECL_GETTER(code, Code)
55
  DECL_RELAXED_GETTER(code, Code)
56 57 58 59 60 61 62 63 64 65 66 67 68 69

  // Cached value of code().InstructionStart().
  // Available only when V8_EXTERNAL_CODE_SPACE is defined.
  DECL_GETTER(code_entry_point, Address)

  inline void SetCodeAndEntryPoint(
      Isolate* isolate_for_sandbox, Code code,
      WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
  // Updates the value of the code entry point. The code must be equal to
  // the code() value.
  inline void UpdateCodeEntryPoint(Isolate* isolate_for_sandbox, Code code);

  inline void AllocateExternalPointerEntries(Isolate* isolate);

70 71 72
  // Alias for code_entry_point to make it API compatible with Code.
  inline Address InstructionStart() const;

73 74 75 76 77 78 79
  DECL_CAST(CodeDataContainer)

  // Dispatched behavior.
  DECL_PRINTER(CodeDataContainer)
  DECL_VERIFIER(CodeDataContainer)

// Layout description.
80 81 82 83 84 85
#define CODE_DATA_FIELDS(V)                                     \
  /* Strong pointer fields. */                                  \
  V(kPointerFieldsStrongEndOffset, 0)                           \
  /* Weak pointer fields. */                                    \
  V(kNextCodeLinkOffset, kTaggedSize)                           \
  V(kPointerFieldsWeakEndOffset, 0)                             \
86 87 88
  /* Strong Code pointer fields. */                             \
  V(kCodeOffset, V8_EXTERNAL_CODE_SPACE_BOOL ? kTaggedSize : 0) \
  V(kCodePointerFieldsStrongEndOffset, 0)                       \
89 90 91 92 93 94
  /* Raw data fields. */                                        \
  V(kCodeEntryPointOffset,                                      \
    V8_EXTERNAL_CODE_SPACE_BOOL ? kExternalPointerSize : 0)     \
  V(kKindSpecificFlagsOffset, kInt32Size)                       \
  V(kUnalignedSize, OBJECT_POINTER_PADDING(kUnalignedSize))     \
  /* Total size. */                                             \
95 96 97 98 99 100 101
  V(kSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_DATA_FIELDS)
#undef CODE_DATA_FIELDS

  class BodyDescriptor;

102 103
 private:
  DECL_ACCESSORS(raw_code, Object)
104
  DECL_RELAXED_ACCESSORS(raw_code, Object)
105 106 107 108
  inline void set_code_entry_point(Isolate* isolate, Address value);

  friend Factory;

109 110 111
  OBJECT_CONSTRUCTORS(CodeDataContainer, HeapObject);
};

112
// Code describes objects with on-the-fly generated machine code.
113
class Code : public HeapObject {
114
 public:
115
  NEVER_READ_ONLY_SPACE
116 117
  // Opaque data type for encapsulating code flags like kind, inline
  // cache state, and arguments count.
118
  using Flags = uint32_t;
119

120 121 122 123 124
  // All Code objects have the following layout:
  //
  //  +--------------------------+
  //  |          header          |
  //  | padded to code alignment |
125
  //  +--------------------------+  <-- raw_body_start()
126
  //  |       instructions       |   == raw_instruction_start()
127
  //  |           ...            |
128
  //  | padded to meta alignment |      see kMetadataAlignment
129
  //  +--------------------------+  <-- raw_instruction_end()
130 131 132 133 134 135
  //  |         metadata         |   == raw_metadata_start() (MS)
  //  |           ...            |
  //  |                          |  <-- MS + handler_table_offset()
  //  |                          |  <-- MS + constant_pool_offset()
  //  |                          |  <-- MS + code_comments_offset()
  //  |                          |  <-- MS + unwinding_info_offset()
136
  //  | padded to obj alignment  |
137
  //  +--------------------------+  <-- raw_metadata_end() == raw_body_end()
138 139 140 141
  //  | padded to code alignment |
  //  +--------------------------+
  //
  // In other words, the variable-size 'body' consists of 'instructions' and
142
  // 'metadata'.
143 144 145 146 147
  //
  // Note the accessor functions below may be prefixed with 'raw'. In this case,
  // raw accessors (e.g. raw_instruction_start) always refer to the on-heap
  // Code object, while camel-case accessors (e.g. InstructionStart) may refer
  // to an off-heap area in the case of embedded builtins.
148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169
  //
  // Embedded builtins are on-heap Code objects, with an out-of-line body
  // section. The on-heap Code object contains an essentially empty body
  // section, while accessors, as mentioned above, redirect to the off-heap
  // area. Metadata table offsets remain relative to MetadataStart(), i.e. they
  // point into the off-heap metadata section. The off-heap layout is described
  // in detail in the EmbeddedData class, but at a high level one can assume a
  // dedicated, out-of-line, instruction and metadata section for each embedded
  // builtin *in addition* to the on-heap Code object:
  //
  //  +--------------------------+  <-- InstructionStart()
  //  |   off-heap instructions  |
  //  |           ...            |
  //  +--------------------------+  <-- InstructionEnd()
  //
  //  +--------------------------+  <-- MetadataStart() (MS)
  //  |    off-heap metadata     |
  //  |           ...            |  <-- MS + handler_table_offset()
  //  |                          |  <-- MS + constant_pool_offset()
  //  |                          |  <-- MS + code_comments_offset()
  //  |                          |  <-- MS + unwinding_info_offset()
  //  +--------------------------+  <-- MetadataEnd()
170

171 172
  // Constants for use in static asserts, stating whether the body is adjacent,
  // i.e. instructions and metadata areas are adjacent.
173
  static constexpr bool kOnHeapBodyIsContiguous = true;
174
  static constexpr bool kOffHeapBodyIsContiguous = false;
175 176 177
  static constexpr bool kBodyIsContiguous =
      kOnHeapBodyIsContiguous && kOffHeapBodyIsContiguous;

178 179 180 181 182 183 184 185 186 187 188 189
  inline Address raw_body_start() const;
  inline Address raw_body_end() const;
  inline int raw_body_size() const;

  inline Address raw_instruction_start() const;
  inline Address InstructionStart() const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionStart() const;

  inline Address raw_instruction_end() const;
  inline Address InstructionEnd() const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionEnd() const;

190 191 192 193
  // When builtins un-embedding is enabled for the Isolate
  // (see Isolate::is_short_builtin_calls_enabled()) then both embedded and
  // un-embedded builtins might be exeuted and thus two kinds of |pc|s might
  // appear on the stack.
194 195 196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211
  // Unlike the paremeterless versions of the functions above the below variants
  // ensure that the instruction start correspond to the given |pc| value.
  // Thus for off-heap trampoline Code objects the result might be the
  // instruction start/end of the embedded code stream or of un-embedded one.
  // For normal Code objects these functions just return the
  // raw_instruction_start/end() values.
  // TODO(11527): remove these versions once the full solution is ready.
  inline Address InstructionStart(Isolate* isolate, Address pc) const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionStart(Isolate* isolate,
                                                    Address pc) const;
  inline Address InstructionEnd(Isolate* isolate, Address pc) const;
  V8_EXPORT_PRIVATE Address OffHeapInstructionEnd(Isolate* isolate,
                                                  Address pc) const;

  // Computes offset of the |pc| from the instruction start. The |pc| must
  // belong to this code.
  inline int GetOffsetFromInstructionStart(Isolate* isolate, Address pc) const;

212 213 214 215 216 217
  inline int raw_instruction_size() const;
  inline void set_raw_instruction_size(int value);
  inline int InstructionSize() const;
  V8_EXPORT_PRIVATE int OffHeapInstructionSize() const;

  inline Address raw_metadata_start() const;
218
  inline Address MetadataStart() const;
219
  V8_EXPORT_PRIVATE Address OffHeapMetadataStart() const;
220
  inline Address raw_metadata_end() const;
221 222
  inline Address MetadataEnd() const;
  V8_EXPORT_PRIVATE Address OffHeapMetadataEnd() const;
223
  inline int raw_metadata_size() const;
224 225 226 227
  inline void set_raw_metadata_size(int value);
  inline int MetadataSize() const;
  int OffHeapMetadataSize() const;

228 229
  // The metadata section is aligned to this value.
  static constexpr int kMetadataAlignment = kIntSize;
230

231
  // [safepoint_table_offset]: The offset where the safepoint table starts.
232
  inline int safepoint_table_offset() const { return 0; }
233 234 235 236
  Address SafepointTableAddress() const;
  int safepoint_table_size() const;
  bool has_safepoint_table() const;

237 238
  // [handler_table_offset]: The offset where the exception handler table
  // starts.
239 240 241 242 243 244 245 246 247
  inline int handler_table_offset() const;
  inline void set_handler_table_offset(int offset);
  Address HandlerTableAddress() const;
  int handler_table_size() const;
  bool has_handler_table() const;

  // [constant_pool offset]: Offset of the constant pool.
  inline int constant_pool_offset() const;
  inline void set_constant_pool_offset(int offset);
248
  inline Address constant_pool() const;
249 250 251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266
  int constant_pool_size() const;
  bool has_constant_pool() const;

  // [code_comments_offset]: Offset of the code comment section.
  inline int code_comments_offset() const;
  inline void set_code_comments_offset(int offset);
  inline Address code_comments() const;
  V8_EXPORT_PRIVATE int code_comments_size() const;
  V8_EXPORT_PRIVATE bool has_code_comments() const;

  // [unwinding_info_offset]: Offset of the unwinding info section.
  inline int32_t unwinding_info_offset() const;
  inline void set_unwinding_info_offset(int32_t offset);
  inline Address unwinding_info_start() const;
  inline Address unwinding_info_end() const;
  inline int unwinding_info_size() const;
  inline bool has_unwinding_info() const;

267
#ifdef ENABLE_DISASSEMBLER
268
  const char* GetName(Isolate* isolate) const;
269
  V8_EXPORT_PRIVATE void Disassemble(const char* name, std::ostream& os,
270
                                     Isolate* isolate,
271
                                     Address current_pc = kNullAddress);
272 273 274
#endif

  // [relocation_info]: Code relocation information
275
  DECL_ACCESSORS(relocation_info, ByteArray)
276 277
  DECL_RELEASE_ACQUIRE_ACCESSORS(relocation_info, ByteArray)
  DECL_ACCESSORS(relocation_info_or_undefined, HeapObject)
278 279 280

  // This function should be called only from GC.
  void ClearEmbeddedObjects(Heap* heap);
281

282 283
  // [deoptimization_data]: Array containing data for deopt for non-baseline
  // code.
284
  DECL_ACCESSORS(deoptimization_data, FixedArray)
285 286 287
  // [bytecode_or_interpreter_data]: BytecodeArray or InterpreterData for
  // baseline code.
  DECL_ACCESSORS(bytecode_or_interpreter_data, HeapObject)
288

289 290 291 292 293 294
  // [source_position_table]: ByteArray for the source positions table for
  // non-baseline code.
  DECL_ACCESSORS(source_position_table, ByteArray)
  // [bytecode_offset_table]: ByteArray for the bytecode offset for baseline
  // code.
  DECL_ACCESSORS(bytecode_offset_table, ByteArray)
295 296 297

  // If source positions have not been collected or an exception has been thrown
  // this will return empty_byte_array.
298
  inline ByteArray SourcePositionTable(SharedFunctionInfo sfi) const;
299

300
  // [code_data_container]: A container indirection for all mutable fields.
301
  DECL_RELEASE_ACQUIRE_ACCESSORS(code_data_container, CodeDataContainer)
302

303
  // [next_code_link]: Link for lists of optimized or deoptimized code.
304
  // Note that this field is stored in the {CodeDataContainer} to be mutable.
305 306
  inline Object next_code_link() const;
  inline void set_next_code_link(Object value);
307 308

  // Unchecked accessors to be used during GC.
309
  inline ByteArray unchecked_relocation_info() const;
310
  inline HeapObject synchronized_unchecked_relocation_info_or_undefined() const;
311 312 313 314

  inline int relocation_size() const;

  // [kind]: Access to specific code kind.
315
  inline CodeKind kind() const;
316 317 318 319 320 321 322

  inline bool is_optimized_code() const;
  inline bool is_wasm_code() const;

  // Testers for interpreter builtins.
  inline bool is_interpreter_trampoline_builtin() const;

323
  // Testers for baseline builtins.
324
  inline bool is_baseline_trampoline_builtin() const;
325 326
  inline bool is_baseline_leave_frame_builtin() const;

327 328 329 330
  // Tells whether the code checks the optimization marker in the function's
  // feedback vector.
  inline bool checks_optimization_marker() const;

331
  // Tells whether the outgoing parameters of this code are tagged pointers.
332
  inline bool has_tagged_outgoing_params() const;
333

334 335
  // [is_turbofanned]: Tells whether the code object was generated by the
  // TurboFan optimizing compiler.
336 337
  inline bool is_turbofanned() const;

338 339
  // [can_have_weak_objects]: If CodeKindIsOptimizedJSFunction(kind), tells
  // whether the embedded objects in code should be treated weakly.
340 341 342
  inline bool can_have_weak_objects() const;
  inline void set_can_have_weak_objects(bool value);

343
  // [builtin]: For builtins, tells which builtin index the code object
344
  // has. The builtin index is a non-negative integer for builtins, and
345
  // Builtin::kNoBuiltinId (-1) otherwise.
346 347
  inline Builtin builtin_id() const;
  inline void set_builtin_id(Builtin builtin);
348 349
  inline bool is_builtin() const;

350 351 352
  inline unsigned inlined_bytecode_size() const;
  inline void set_inlined_bytecode_size(unsigned size);

353 354 355
  inline bool has_safepoint_info() const;

  // [stack_slots]: If {has_safepoint_info()}, the number of stack slots
356
  // reserved in the code prologue.
357
  inline int stack_slots() const;
358

359
  // [marked_for_deoptimization]: If CodeKindCanDeoptimize(kind), tells whether
360
  // the code is going to be deoptimized.
361 362 363
  inline bool marked_for_deoptimization() const;
  inline void set_marked_for_deoptimization(bool flag);

364 365 366 367
  // [deoptimization_count]: If CodeKindCanDeoptimize(kind). In turboprop we
  // retain the deoptimized code on soft deopts for a certain number of soft
  // deopts. This field keeps track of the number of deoptimizations we have
  // seen so far.
368 369 370
  inline int deoptimization_count() const;
  inline void increment_deoptimization_count();

371 372 373 374
  // [embedded_objects_cleared]: If CodeKindIsOptimizedJSFunction(kind), tells
  // whether the embedded objects in the code marked for deoptimization were
  // cleared. Note that embedded_objects_cleared() implies
  // marked_for_deoptimization().
375 376 377
  inline bool embedded_objects_cleared() const;
  inline void set_embedded_objects_cleared(bool flag);

378
  // [deopt_already_counted]: If CodeKindCanDeoptimize(kind), tells whether
379 380 381 382 383 384 385 386 387 388 389 390 391 392 393 394
  // the code was already deoptimized.
  inline bool deopt_already_counted() const;
  inline void set_deopt_already_counted(bool flag);

  // [is_promise_rejection]: For kind BUILTIN tells whether the
  // exception thrown by the code will lead to promise rejection or
  // uncaught if both this and is_exception_caught is set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_promise_rejection(bool flag);

  // [is_exception_caught]: For kind BUILTIN tells whether the
  // exception thrown by the code will be caught internally or
  // uncaught if both this and is_promise_rejection is set.
  // Use GetBuiltinCatchPrediction to access this.
  inline void set_is_exception_caught(bool flag);

395 396 397 398
  // [is_off_heap_trampoline]: For kind BUILTIN tells whether
  // this is a trampoline to an off-heap builtin.
  inline bool is_off_heap_trampoline() const;

399
  // Get the safepoint entry for the given pc.
400
  SafepointEntry GetSafepointEntry(Isolate* isolate, Address pc);
401 402 403 404 405 406 407 408 409 410 411

  // The entire code object including its header is copied verbatim to the
  // snapshot so that it can be written in one, fast, memcpy during
  // deserialization. The deserializer will overwrite some pointers, rather
  // like a runtime linker, but the random allocation addresses used in the
  // mksnapshot process would still be present in the unlinked snapshot data,
  // which would make snapshot production non-reproducible. This method wipes
  // out the to-be-overwritten header data for reproducible snapshots.
  inline void WipeOutHeader();

  // Clear uninitialized padding space. This ensures that the snapshot content
412
  // is deterministic. Depending on the V8 build mode there could be no padding.
413 414 415
  inline void clear_padding();
  // Initialize the flags field. Similar to clear_padding above this ensure that
  // the snapshot content is deterministic.
416 417
  inline void initialize_flags(CodeKind kind, bool is_turbofanned,
                               int stack_slots, bool is_off_heap_trampoline);
418 419

  // Convert a target address into a code object.
420
  static inline Code GetCodeFromTargetAddress(Address address);
421 422

  // Convert an entry address into an object.
423
  static inline Code GetObjectFromEntryAddress(Address location_of_address);
424 425

  // Returns the size of code and its metadata. This includes the size of code
426
  // relocation information, deoptimization data.
427 428 429 430 431
  inline int SizeIncludingMetadata() const;

  // Returns the address of the first relocation info (read backwards!).
  inline byte* relocation_start() const;

432 433 434
  // Returns the address right after the relocation info (read backwards!).
  inline byte* relocation_end() const;

435
  // Code entry point.
436
  inline Address entry() const;
437 438

  // Returns true if pc is inside this object's instructions.
439
  inline bool contains(Isolate* isolate, Address pc);
440 441 442 443 444

  // Relocate the code by delta bytes. Called to signal that this code
  // object has been moved by delta bytes.
  void Relocate(intptr_t delta);

445
  // Migrate code from desc without flushing the instruction cache.
446 447
  void CopyFromNoFlush(ByteArray reloc_info, Heap* heap, const CodeDesc& desc);
  void RelocateFromDesc(ByteArray reloc_info, Heap* heap, const CodeDesc& desc);
448

449 450 451 452
#ifdef VERIFY_HEAP
  void VerifyRelocInfo(Isolate* isolate, ByteArray reloc_info);
#endif

453 454
  // Copy the RelocInfo portion of |desc| to |dest|. The ByteArray must be
  // exactly the same size as the RelocInfo in |desc|.
455
  static inline void CopyRelocInfoToByteArray(ByteArray dest,
456 457
                                              const CodeDesc& desc);

458 459 460 461 462 463
  inline uintptr_t GetBaselineStartPCForBytecodeOffset(int bytecode_offset,
                                                       BytecodeArray bytecodes);

  inline uintptr_t GetBaselineEndPCForBytecodeOffset(int bytecode_offset,
                                                     BytecodeArray bytecodes);

464 465 466 467 468 469 470 471
  // Returns the PC of the next bytecode in execution order.
  // If the bytecode at the given offset is JumpLoop, the PC of the jump target
  // is returned. Other jumps are not allowed.
  // For other bytecodes this is equivalent to
  // GetBaselineEndPCForBytecodeOffset.
  inline uintptr_t GetBaselinePCForNextExecutedBytecode(
      int bytecode_offset, BytecodeArray bytecodes);

472 473
  inline int GetBytecodeOffsetForBaselinePC(Address baseline_pc,
                                            BytecodeArray bytecodes);
474

475
  // Flushes the instruction cache for the executable instructions of this code
476
  // object. Make sure to call this while the code is still writable.
477 478
  void FlushICache() const;

479 480 481 482 483
  // Returns the object size for a given body (used for allocation).
  static int SizeFor(int body_size) {
    return RoundUp(kHeaderSize + body_size, kCodeAlignment);
  }

484
  DECL_CAST(Code)
485 486 487 488 489 490 491

  // Dispatched behavior.
  inline int CodeSize() const;

  DECL_PRINTER(Code)
  DECL_VERIFIER(Code)

492
  bool CanDeoptAt(Isolate* isolate, Address pc);
493

494 495
  void SetMarkedForDeoptimization(const char* reason);

496 497
  inline HandlerTable::CatchPrediction GetBuiltinCatchPrediction();

498
  bool IsIsolateIndependent(Isolate* isolate);
499

500 501
  inline bool CanContainWeakObjects();

502
  inline bool IsWeakObject(HeapObject object);
503

504
  static inline bool IsWeakObjectInOptimizedCode(HeapObject object);
505

506 507 508 509 510
  // Returns false if this is an embedded builtin Code object that's in
  // read_only_space and hence doesn't have execute permissions.
  inline bool IsExecutable();

  // Returns true if the function is inlined in the code.
511
  bool Inlines(SharedFunctionInfo sfi);
512

513
  class OptimizedCodeIterator;
514 515

  // Layout description.
516 517
#define CODE_FIELDS(V)                                                        \
  V(kRelocationInfoOffset, kTaggedSize)                                       \
518
  V(kDeoptimizationDataOrInterpreterDataOffset, kTaggedSize)                  \
519
  V(kPositionTableOffset, kTaggedSize)                                        \
520 521 522 523 524 525 526 527 528 529 530 531 532 533 534 535 536 537 538 539
  V(kCodeDataContainerOffset, kTaggedSize)                                    \
  /* Data or code not directly visited by GC directly starts here. */         \
  /* The serializer needs to copy bytes starting from here verbatim. */       \
  /* Objects embedded into code is visited via reloc info. */                 \
  V(kDataStart, 0)                                                            \
  V(kInstructionSizeOffset, kIntSize)                                         \
  V(kMetadataSizeOffset, kIntSize)                                            \
  V(kFlagsOffset, kInt32Size)                                                 \
  V(kBuiltinIndexOffset, kIntSize)                                            \
  V(kInlinedBytecodeSizeOffset, kIntSize)                                     \
  /* Offsets describing inline metadata tables, relative to MetadataStart. */ \
  V(kHandlerTableOffsetOffset, kIntSize)                                      \
  V(kConstantPoolOffsetOffset,                                                \
    FLAG_enable_embedded_constant_pool ? kIntSize : 0)                        \
  V(kCodeCommentsOffsetOffset, kIntSize)                                      \
  V(kUnwindingInfoOffsetOffset, kInt32Size)                                   \
  V(kUnalignedHeaderSize, 0)                                                  \
  /* Add padding to align the instruction start following right after */      \
  /* the Code object header. */                                               \
  V(kOptionalPaddingOffset, CODE_POINTER_PADDING(kOptionalPaddingOffset))     \
540 541 542 543
  V(kHeaderSize, 0)

  DEFINE_FIELD_OFFSET_CONSTANTS(HeapObject::kHeaderSize, CODE_FIELDS)
#undef CODE_FIELDS
544

545 546 547
  // This documents the amount of free space we have in each Code object header
  // due to padding for code alignment.
#if V8_TARGET_ARCH_ARM64
548
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 12 : 24;
549
#elif V8_TARGET_ARCH_MIPS64
550
  static constexpr int kHeaderPaddingSize = 24;
551 552
#elif V8_TARGET_ARCH_LOONG64
  static constexpr int kHeaderPaddingSize = 24;
553
#elif V8_TARGET_ARCH_X64
554
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 12 : 56;
555
#elif V8_TARGET_ARCH_ARM
556
  static constexpr int kHeaderPaddingSize = 12;
557
#elif V8_TARGET_ARCH_IA32
558
  static constexpr int kHeaderPaddingSize = 12;
559
#elif V8_TARGET_ARCH_MIPS
560
  static constexpr int kHeaderPaddingSize = 12;
561
#elif V8_TARGET_ARCH_PPC64
562
  static constexpr int kHeaderPaddingSize =
563 564
      FLAG_enable_embedded_constant_pool ? (COMPRESS_POINTERS_BOOL ? 8 : 20)
                                         : (COMPRESS_POINTERS_BOOL ? 12 : 24);
565
#elif V8_TARGET_ARCH_S390X
566
  static constexpr int kHeaderPaddingSize = COMPRESS_POINTERS_BOOL ? 12 : 24;
Brice Dobry's avatar
Brice Dobry committed
567
#elif V8_TARGET_ARCH_RISCV64
568
  static constexpr int kHeaderPaddingSize = (COMPRESS_POINTERS_BOOL ? 12 : 24);
569 570 571
#else
#error Unknown architecture.
#endif
572
  STATIC_ASSERT(FIELD_SIZE(kOptionalPaddingOffset) == kHeaderPaddingSize);
573

574 575
  class BodyDescriptor;

576
  // Flags layout.  base::BitField<type, shift, size>.
577
#define CODE_FLAGS_BIT_FIELDS(V, _)    \
578
  V(KindField, CodeKind, 4, _)         \
579
  V(IsTurbofannedField, bool, 1, _)    \
580 581
  V(StackSlotsField, int, 24, _)       \
  V(IsOffHeapTrampoline, bool, 1, _)
582 583
  DEFINE_BIT_FIELDS(CODE_FLAGS_BIT_FIELDS)
#undef CODE_FLAGS_BIT_FIELDS
584
  STATIC_ASSERT(kCodeKindCount <= KindField::kNumValues);
585
  STATIC_ASSERT(CODE_FLAGS_BIT_FIELDS_Ranges::kBitsCount == 30);
586 587
  STATIC_ASSERT(CODE_FLAGS_BIT_FIELDS_Ranges::kBitsCount <=
                FIELD_SIZE(kFlagsOffset) * kBitsPerByte);
588

589
  // KindSpecificFlags layout.
590 591
#define CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS(V, _) \
  V(MarkedForDeoptimizationField, bool, 1, _)     \
592
  V(EmbeddedObjectsClearedField, bool, 1, _)      \
593 594 595
  V(DeoptAlreadyCountedField, bool, 1, _)         \
  V(CanHaveWeakObjectsField, bool, 1, _)          \
  V(IsPromiseRejectionField, bool, 1, _)          \
596 597
  V(IsExceptionCaughtField, bool, 1, _)           \
  V(DeoptCountField, int, 4, _)
598 599
  DEFINE_BIT_FIELDS(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS)
#undef CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS
600 601 602 603
  STATIC_ASSERT(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS_Ranges::kBitsCount == 10);
  STATIC_ASSERT(CODE_KIND_SPECIFIC_FLAGS_BIT_FIELDS_Ranges::kBitsCount <=
                FIELD_SIZE(CodeDataContainer::kKindSpecificFlagsOffset) *
                    kBitsPerByte);
604 605 606 607

  // The {marked_for_deoptimization} field is accessed from generated code.
  static const int kMarkedForDeoptimizationBit =
      MarkedForDeoptimizationField::kShift;
608 609

  static const int kArgumentsBits = 16;
610 611
  // Reserve one argument count value as the "don't adapt arguments" sentinel.
  static const int kMaxArguments = (1 << kArgumentsBits) - 2;
612 613 614

 private:
  friend class RelocIterator;
615 616 617
  friend class EvacuateVisitorBase;

  inline CodeDataContainer GCSafeCodeDataContainer(AcquireLoadTag) const;
618 619 620 621

  bool is_promise_rejection() const;
  bool is_exception_caught() const;

622 623 624 625 626 627 628 629 630 631 632
  enum BytecodeToPCPosition {
    kPcAtStartOfBytecode,
    // End of bytecode equals the start of the next bytecode.
    // We need it when we deoptimize to the next bytecode (lazy deopt or deopt
    // of non-topmost frame).
    kPcAtEndOfBytecode
  };
  inline uintptr_t GetBaselinePCForBytecodeOffset(int bytecode_offset,
                                                  BytecodeToPCPosition position,
                                                  BytecodeArray bytecodes);

633
  OBJECT_CONSTRUCTORS(Code, HeapObject);
634 635 636 637 638
};

// Iterates over all optimized Code objects, walking the native contexts of the
// given isolate.
class Code::OptimizedCodeIterator {
 public:
  explicit OptimizedCodeIterator(Isolate* isolate);
  OptimizedCodeIterator(const OptimizedCodeIterator&) = delete;
  OptimizedCodeIterator& operator=(const OptimizedCodeIterator&) = delete;
  Code Next();

 private:
  NativeContext next_context_;
  Code current_code_;
  Isolate* isolate_;

  DISALLOW_GARBAGE_COLLECTION(no_gc)
};

651 652 653 654
// Helper functions for converting Code objects to CodeDataContainer and back
// when V8_EXTERNAL_CODE_SPACE is enabled.
inline CodeT ToCodeT(Code code);
inline Code FromCodeT(CodeT code);
655
inline Code FromCodeT(CodeT code, RelaxedLoadTag);
656 657 658 659
inline Code FromCodeT(CodeT code, AcquireLoadTag);
inline Code FromCodeT(CodeT code, PtrComprCageBase);
inline Code FromCodeT(CodeT code, PtrComprCageBase, RelaxedLoadTag);
inline Code FromCodeT(CodeT code, PtrComprCageBase, AcquireLoadTag);
660 661
inline CodeDataContainer CodeDataContainerFromCodeT(CodeT code);

662
class AbstractCode : public HeapObject {
663
 public:
664
  NEVER_READ_ONLY_SPACE
665 666 667 668 669

  int SourcePosition(int offset);
  int SourceStatementPosition(int offset);

  // Returns the address of the first instruction.
670
  inline Address raw_instruction_start();
671

672 673 674 675 676
  // Returns the address of the first instruction. For off-heap code objects
  // this differs from instruction_start (which would point to the off-heap
  // trampoline instead).
  inline Address InstructionStart();

677
  // Returns the address right after the last instruction.
678
  inline Address raw_instruction_end();
679

680 681 682 683 684
  // Returns the address right after the last instruction. For off-heap code
  // objects this differs from instruction_end (which would point to the
  // off-heap trampoline instead).
  inline Address InstructionEnd();

685
  // Returns the size of the code instructions.
686
  inline int raw_instruction_size();
687

688 689
  // Returns the size of the native instructions, including embedded
  // data such as the safepoints table. For off-heap code objects
690 691
  // this may differ from instruction_size in that this will return the size of
  // the off-heap instruction stream rather than the on-heap trampoline located
692 693 694
  // at instruction_start.
  inline int InstructionSize();

695 696
  // Return the source position table for interpreter code.
  inline ByteArray SourcePositionTable(SharedFunctionInfo sfi);
697 698 699 700 701 702 703

  void DropStackFrameCache();

  // Returns the size of instructions and the metadata.
  inline int SizeIncludingMetadata();

  // Returns true if pc is inside this object's instructions.
704
  inline bool contains(Address pc);
705

706 707
  // Returns the kind of the code.
  inline CodeKind kind();
708

709
  DECL_CAST(AbstractCode)
710
  inline Code GetCode();
711
  inline BytecodeArray GetBytecodeArray();
712 713 714 715

  // Max loop nesting marker used to postpose OSR. We don't take loop
  // nesting that is deeper than 5 levels into account.
  static const int kMaxLoopNestingMarker = 6;
716

717
  OBJECT_CONSTRUCTORS(AbstractCode, HeapObject);
718 719 720

 private:
  inline ByteArray SourcePositionTableInternal();
721 722
};

// Dependent code is a singly linked list of weak fixed arrays. Each array
// contains weak pointers to code objects for one dependent group. The suffix of
// the array can be filled with the undefined value if the number of codes is
// less than the length of the array.
//
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
// | next | count & group 1 | code 1 | code 2 | ... | code n | undefined | ... |
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
//    |
//    V
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
// | next | count & group 2 | code 1 | code 2 | ... | code m | undefined | ... |
// +------+-----------------+--------+--------+-----+--------+-----------+-----+
//    |
//    V
// empty_weak_fixed_array()
//
// The list of weak fixed arrays is ordered by dependency groups.

class DependentCode : public WeakFixedArray {
743
 public:
744
  DECL_CAST(DependentCode)
745

746 747 748 749 750
  enum DependencyGroup {
    // Group of code that embed a transition to this map, and depend on being
    // deoptimized when the transition is replaced by a new version.
    kTransitionGroup,
    // Group of code that omit run-time prototype checks for prototypes
751 752 753 754 755 756 757 758
    // described by this map. The group is deoptimized whenever the following
    // conditions hold, possibly invalidating the assumptions embedded in the
    // code:
    // a) A fast-mode object described by this map changes shape (and
    // transitions to a new map), or
    // b) A dictionary-mode prototype described by this map changes shape, the
    // const-ness of one of its properties changes, or its [[Prototype]]
    // changes (only the latter causes a transition).
759 760 761 762 763 764
    kPrototypeCheckGroup,
    // Group of code that depends on global property values in property cells
    // not being changed.
    kPropertyCellChangedGroup,
    // Group of code that omit run-time checks for field(s) introduced by
    // this map, i.e. for the field type.
765 766 767
    kFieldTypeGroup,
    kFieldConstGroup,
    kFieldRepresentationGroup,
768 769 770 771 772 773 774 775 776 777 778
    // Group of code that omit run-time type checks for initial maps of
    // constructors.
    kInitialMapChangedGroup,
    // Group of code that depends on tenuring information in AllocationSites
    // not being changed.
    kAllocationSiteTenuringChangedGroup,
    // Group of code that depends on element transition information in
    // AllocationSites not being changed.
    kAllocationSiteTransitionChangedGroup
  };

779
  // Register a dependency of {code} on {object}, of the kind given by {group}.
780
  V8_EXPORT_PRIVATE static void InstallDependency(Isolate* isolate,
781
                                                  Handle<Code> code,
782 783
                                                  Handle<HeapObject> object,
                                                  DependencyGroup group);
784

785
  void DeoptimizeDependentCodeGroup(DependencyGroup group);
786

787
  bool MarkCodeForDeoptimization(DependencyGroup group);
788

789
  // The following low-level accessors are exposed only for tests.
790
  inline DependencyGroup group();
791
  inline MaybeObject object_at(int i);
792
  inline int count();
793
  inline DependentCode next_link();
794

795
 private:
796 797
  static const char* DependencyGroupName(DependencyGroup group);

798
  // Get/Set {object}'s {DependentCode}.
799
  static DependentCode GetDependentCode(Handle<HeapObject> object);
800 801
  static void SetDependentCode(Handle<HeapObject> object,
                               Handle<DependentCode> dep);
802

803
  static Handle<DependentCode> New(Isolate* isolate, DependencyGroup group,
804
                                   Handle<Code> code,
805
                                   Handle<DependentCode> next);
806 807 808 809
  static Handle<DependentCode> EnsureSpace(Isolate* isolate,
                                           Handle<DependentCode> entries);
  static Handle<DependentCode> InsertWeakCode(Isolate* isolate,
                                              Handle<DependentCode> entries,
810
                                              DependencyGroup group,
811
                                              Handle<Code> code);
812

813 814 815
  // Compact by removing cleared weak cells and return true if there was
  // any cleared weak cell.
  bool Compact();
816

817 818 819 820
  static int Grow(int number_of_entries) {
    if (number_of_entries < 5) return number_of_entries + 1;
    return number_of_entries * 5 / 4;
  }
821 822 823 824 825 826

  static const int kGroupCount = kAllocationSiteTransitionChangedGroup + 1;
  static const int kNextLinkIndex = 0;
  static const int kFlagsIndex = 1;
  static const int kCodesStartIndex = 2;

827
  inline void set_next_link(DependentCode next);
828
  inline void set_count(int value);
829
  inline void set_object_at(int i, MaybeObject object);
830 831 832
  inline void clear_at(int i);
  inline void copy(int from, int to);

833 834
  inline int flags();
  inline void set_flags(int flags);
835 836
  using GroupField = base::BitField<int, 0, 5>;
  using CountField = base::BitField<int, 5, 27>;
837
  STATIC_ASSERT(kGroupCount <= GroupField::kMax + 1);
838

839
  OBJECT_CONSTRUCTORS(DependentCode, WeakFixedArray);
840 841 842
};

// BytecodeArray represents a sequence of interpreter bytecodes.
class BytecodeArray
    : public TorqueGeneratedBytecodeArray<BytecodeArray, FixedArrayBase> {
 public:
  enum Age {
    kNoAgeBytecodeAge = 0,
    kQuadragenarianBytecodeAge,
    kQuinquagenarianBytecodeAge,
    kSexagenarianBytecodeAge,
    kSeptuagenarianBytecodeAge,
    kOctogenarianBytecodeAge,
    kAfterLastBytecodeAge,
    kFirstBytecodeAge = kNoAgeBytecodeAge,
    kLastBytecodeAge = kAfterLastBytecodeAge - 1,
    kBytecodeAgeCount = kAfterLastBytecodeAge - kFirstBytecodeAge - 1,
    kIsOldBytecodeAge = kSexagenarianBytecodeAge
  };

  static constexpr int SizeFor(int length) {
    return OBJECT_POINTER_ALIGN(kHeaderSize + length);
  }

  // Setter and getter
  inline byte get(int index) const;
  inline void set(int index, byte value);

  // Returns data start address.
  inline Address GetFirstBytecodeAddress();

  // Accessors for frame size.
  inline int32_t frame_size() const;
  inline void set_frame_size(int32_t frame_size);

  // Accessor for register count (derived from frame_size).
  inline int register_count() const;

  // Accessors for parameter count (including implicit 'this' receiver).
  inline int32_t parameter_count() const;
  inline void set_parameter_count(int32_t number_of_parameters);

  // Register used to pass the incoming new.target or generator object from the
  // function call.
  inline interpreter::Register incoming_new_target_or_generator_register()
      const;
  inline void set_incoming_new_target_or_generator_register(
      interpreter::Register incoming_new_target_or_generator_register);

  // Accessors for OSR loop nesting level.
  inline int osr_loop_nesting_level() const;
  inline void set_osr_loop_nesting_level(int depth);

  // Accessors for bytecode's code age.
  inline Age bytecode_age() const;
  inline void set_bytecode_age(Age age);

  inline bool HasSourcePositionTable() const;
  inline bool DidSourcePositionGenerationFail() const;

  // If source positions have not been collected or an exception has been thrown
  // this will return empty_byte_array.
  inline ByteArray SourcePositionTable() const;

  // Indicates that an attempt was made to collect source positions, but that it
  // failed most likely due to stack exhaustion. When in this state
  // |SourcePositionTable| will return an empty byte array rather than crashing
  // as it would if no attempt was ever made to collect source positions.
  inline void SetSourcePositionsFailedToCollect();

  // Dispatched behavior.
  inline int BytecodeArraySize();

  inline int raw_instruction_size();

  // Returns the size of bytecode and its metadata. This includes the size of
  // bytecode, constant pool, source position table, and handler table.
  inline int SizeIncludingMetadata();

  DECL_PRINTER(BytecodeArray)
  DECL_VERIFIER(BytecodeArray)

  V8_EXPORT_PRIVATE void Disassemble(std::ostream& os);

  void CopyBytecodesTo(BytecodeArray to);

  // Bytecode aging
  V8_EXPORT_PRIVATE bool IsOld() const;
  V8_EXPORT_PRIVATE void MakeOlder();

  // Clear uninitialized padding space. This ensures that the snapshot content
  // is deterministic.
  inline void clear_padding();

  // InterpreterEntryTrampoline expects these fields to be next to each other
  // and writes a 16-bit value to reset them.
  STATIC_ASSERT(BytecodeArray::kBytecodeAgeOffset ==
                kOsrLoopNestingLevelOffset + kCharSize);

  // Maximal memory consumption for a single BytecodeArray.
  static const int kMaxSize = 512 * MB;
  // Maximal length of a single BytecodeArray.
  static const int kMaxLength = kMaxSize - kHeaderSize;

  class BodyDescriptor;

 private:
  // Hide accessors inherited from generated class. Use parameter_count instead.
  DECL_INT_ACCESSORS(parameter_size)

  TQ_OBJECT_CONSTRUCTORS(BytecodeArray)
};

// DeoptimizationData is a fixed array used to hold the deoptimization data for
// optimized code.  It also contains information about functions that were
// inlined.  If N different functions were inlined then the first N elements of
// the literal array will contain these functions.
//
// It can be empty.
class DeoptimizationData : public FixedArray {
960 961 962 963 964 965 966 967 968
 public:
  // Layout description.  Indices in the array.
  static const int kTranslationByteArrayIndex = 0;
  static const int kInlinedFunctionCountIndex = 1;
  static const int kLiteralArrayIndex = 2;
  static const int kOsrBytecodeOffsetIndex = 3;
  static const int kOsrPcOffsetIndex = 4;
  static const int kOptimizationIdIndex = 5;
  static const int kSharedFunctionInfoIndex = 6;
969
  static const int kInliningPositionsIndex = 7;
970
  static const int kDeoptExitStartIndex = 8;
971 972 973
  static const int kEagerSoftAndBailoutDeoptCountIndex = 9;
  static const int kLazyDeoptCountIndex = 10;
  static const int kFirstDeoptEntryIndex = 11;
974 975 976 977 978

  // Offsets of deopt entry elements relative to the start of the entry.
  static const int kBytecodeOffsetRawOffset = 0;
  static const int kTranslationIndexOffset = 1;
  static const int kPcOffset = 2;
979 980 981 982
#ifdef DEBUG
  static const int kNodeIdOffset = 3;
  static const int kDeoptEntrySize = 4;
#else   // DEBUG
983
  static const int kDeoptEntrySize = 3;
984
#endif  // DEBUG
985 986 987

// Simple element accessors.
#define DECL_ELEMENT_ACCESSORS(name, type) \
988
  inline type name() const;                \
989
  inline void Set##name(type value);
990

991
  DECL_ELEMENT_ACCESSORS(TranslationByteArray, TranslationArray)
992
  DECL_ELEMENT_ACCESSORS(InlinedFunctionCount, Smi)
993
  DECL_ELEMENT_ACCESSORS(LiteralArray, FixedArray)
994 995 996
  DECL_ELEMENT_ACCESSORS(OsrBytecodeOffset, Smi)
  DECL_ELEMENT_ACCESSORS(OsrPcOffset, Smi)
  DECL_ELEMENT_ACCESSORS(OptimizationId, Smi)
997
  DECL_ELEMENT_ACCESSORS(SharedFunctionInfo, Object)
998
  DECL_ELEMENT_ACCESSORS(InliningPositions, PodArray<InliningPosition>)
999
  DECL_ELEMENT_ACCESSORS(DeoptExitStart, Smi)
1000 1001
  DECL_ELEMENT_ACCESSORS(EagerSoftAndBailoutDeoptCount, Smi)
  DECL_ELEMENT_ACCESSORS(LazyDeoptCount, Smi)
1002 1003 1004 1005 1006

#undef DECL_ELEMENT_ACCESSORS

// Accessors for elements of the ith deoptimization entry.
#define DECL_ENTRY_ACCESSORS(name, type) \
1007
  inline type name(int i) const;         \
1008
  inline void Set##name(int i, type value);
1009 1010 1011 1012

  DECL_ENTRY_ACCESSORS(BytecodeOffsetRaw, Smi)
  DECL_ENTRY_ACCESSORS(TranslationIndex, Smi)
  DECL_ENTRY_ACCESSORS(Pc, Smi)
1013 1014 1015
#ifdef DEBUG
  DECL_ENTRY_ACCESSORS(NodeId, Smi)
#endif  // DEBUG
1016 1017 1018

#undef DECL_ENTRY_ACCESSORS

1019
  inline BytecodeOffset GetBytecodeOffset(int i);
1020

1021
  inline void SetBytecodeOffset(int i, BytecodeOffset value);
1022 1023 1024 1025 1026 1027 1028

  inline int DeoptCount();

  static const int kNotInlinedIndex = -1;

  // Returns the inlined function at the given position in LiteralArray, or the
  // outer function if index == kNotInlinedIndex.
1029
  class SharedFunctionInfo GetInlinedFunction(int index);
1030

1031 1032
  // Allocates a DeoptimizationData.
  static Handle<DeoptimizationData> New(Isolate* isolate, int deopt_entry_count,
1033
                                        AllocationType allocation);
1034

1035
  // Return an empty DeoptimizationData.
1036
  V8_EXPORT_PRIVATE static Handle<DeoptimizationData> Empty(Isolate* isolate);
1037

1038
  DECL_CAST(DeoptimizationData)
1039 1040

#ifdef ENABLE_DISASSEMBLER
1041
  void DeoptimizationDataPrint(std::ostream& os);
1042 1043 1044 1045 1046 1047 1048 1049
#endif

 private:
  static int IndexForEntry(int i) {
    return kFirstDeoptEntryIndex + (i * kDeoptEntrySize);
  }

  static int LengthFor(int entry_count) { return IndexForEntry(entry_count); }
1050

1051
  OBJECT_CONSTRUCTORS(DeoptimizationData, FixedArray);
1052 1053 1054 1055 1056 1057 1058 1059
};

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_CODE_H_