// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#ifndef V8_OBJECTS_CODE_INL_H_
#define V8_OBJECTS_CODE_INL_H_

#include "src/objects/code.h"

#include "src/isolate.h"
#include "src/objects/dictionary.h"
#include "src/v8memory.h"

// Has to be the last include (doesn't have include guards):
#include "src/objects/object-macros.h"

namespace v8 {
namespace internal {

TYPE_CHECKER(BytecodeArray, BYTECODE_ARRAY_TYPE)
TYPE_CHECKER(Code, CODE_TYPE)
TYPE_CHECKER(CodeDataContainer, CODE_DATA_CONTAINER_TYPE)

CAST_ACCESSOR(AbstractCode)
CAST_ACCESSOR(BytecodeArray)
CAST_ACCESSOR(Code)
CAST_ACCESSOR(CodeDataContainer)
CAST_ACCESSOR(DependentCode)
CAST_ACCESSOR(DeoptimizationData)

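// AbstractCode is a thin wrapper around either a Code object or a
// BytecodeArray; the accessors below dispatch on IsCode() to the matching
// concrete implementation.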
int AbstractCode::raw_instruction_size() {
  if (IsCode()) {
    return GetCode()->raw_instruction_size();
  } else {
    return GetBytecodeArray()->length();
  }
}

int AbstractCode::InstructionSize() {
  if (IsCode()) {
    return GetCode()->InstructionSize();
  } else {
    return GetBytecodeArray()->length();
  }
}

ByteArray* AbstractCode::source_position_table() {
  if (IsCode()) {
    return GetCode()->SourcePositionTable();
  } else {
    return GetBytecodeArray()->SourcePositionTable();
  }
}

Object* AbstractCode::stack_frame_cache() {
  Object* maybe_table;
  if (IsCode()) {
    maybe_table = GetCode()->source_position_table();
  } else {
    maybe_table = GetBytecodeArray()->source_position_table();
  }
  if (maybe_table->IsSourcePositionTableWithFrameCache()) {
    return SourcePositionTableWithFrameCache::cast(maybe_table)
        ->stack_frame_cache();
  }
  return Smi::kZero;
}

int AbstractCode::SizeIncludingMetadata() {
  if (IsCode()) {
    return GetCode()->SizeIncludingMetadata();
  } else {
    return GetBytecodeArray()->SizeIncludingMetadata();
  }
}

int AbstractCode::ExecutableSize() {
  if (IsCode()) {
    return GetCode()->ExecutableSize();
  } else {
    return GetBytecodeArray()->BytecodeArraySize();
  }
}

Address AbstractCode::raw_instruction_start() {
  if (IsCode()) {
    return GetCode()->raw_instruction_start();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress();
  }
}

Address AbstractCode::InstructionStart() {
  if (IsCode()) {
    return GetCode()->InstructionStart();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress();
  }
}

Address AbstractCode::raw_instruction_end() {
  if (IsCode()) {
    return GetCode()->raw_instruction_end();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress() +
           GetBytecodeArray()->length();
  }
}

Address AbstractCode::InstructionEnd() {
  if (IsCode()) {
    return GetCode()->InstructionEnd();
  } else {
    return GetBytecodeArray()->GetFirstBytecodeAddress() +
           GetBytecodeArray()->length();
  }
}

bool AbstractCode::contains(Address inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

AbstractCode::Kind AbstractCode::kind() {
  if (IsCode()) {
    return static_cast<AbstractCode::Kind>(GetCode()->kind());
  } else {
    return INTERPRETED_FUNCTION;
  }
}

Code* AbstractCode::GetCode() { return Code::cast(this); }

BytecodeArray* AbstractCode::GetBytecodeArray() {
  return BytecodeArray::cast(this);
}

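// DependentCode is a fixed array listing the code objects that depend on
// some assumption (its dependency group): the flags Smi packs the entry
// count and the group, and next_link presumably chains to the array for the
// next dependency group.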
DependentCode* DependentCode::next_link() {
  return DependentCode::cast(get(kNextLinkIndex));
}

void DependentCode::set_next_link(DependentCode* next) {
  set(kNextLinkIndex, next);
}

int DependentCode::flags() { return Smi::ToInt(get(kFlagsIndex)); }

void DependentCode::set_flags(int flags) {
  set(kFlagsIndex, Smi::FromInt(flags));
}

int DependentCode::count() { return CountField::decode(flags()); }

void DependentCode::set_count(int value) {
  set_flags(CountField::update(flags(), value));
}

DependentCode::DependencyGroup DependentCode::group() {
  return static_cast<DependencyGroup>(GroupField::decode(flags()));
}

void DependentCode::set_group(DependentCode::DependencyGroup group) {
  set_flags(GroupField::update(flags(), static_cast<int>(group)));
}

void DependentCode::set_object_at(int i, Object* object) {
  set(kCodesStartIndex + i, object);
}

Object* DependentCode::object_at(int i) { return get(kCodesStartIndex + i); }

void DependentCode::clear_at(int i) { set_undefined(kCodesStartIndex + i); }

void DependentCode::copy(int from, int to) {
  set(kCodesStartIndex + to, get(kCodesStartIndex + from));
}

INT_ACCESSORS(Code, raw_instruction_size, kInstructionSizeOffset)
INT_ACCESSORS(Code, handler_table_offset, kHandlerTableOffsetOffset)
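// Like ACCESSORS, but with an added write check that the stored value never
// lives in new space; header fields of Code objects are presumably not
// visited by the scavenger, so a new-space pointer could go stale.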
#define CODE_ACCESSORS(name, type, offset)           \
  ACCESSORS_CHECKED2(Code, name, type, offset, true, \
                     !GetHeap()->InNewSpace(value))
CODE_ACCESSORS(relocation_info, ByteArray, kRelocationInfoOffset)
CODE_ACCESSORS(deoptimization_data, FixedArray, kDeoptimizationDataOffset)
CODE_ACCESSORS(source_position_table, Object, kSourcePositionTableOffset)
CODE_ACCESSORS(code_data_container, CodeDataContainer, kCodeDataContainerOffset)
#undef CODE_ACCESSORS

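// Wipes out the header's object pointers, presumably so that no stale heap
// references survive in a Code object whose contents are being discarded.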
void Code::WipeOutHeader() {
  WRITE_FIELD(this, kRelocationInfoOffset, nullptr);
  WRITE_FIELD(this, kDeoptimizationDataOffset, nullptr);
  WRITE_FIELD(this, kSourcePositionTableOffset, nullptr);
  WRITE_FIELD(this, kCodeDataContainerOffset, nullptr);
}

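// Zeroes the padding between the last header field and kHeaderSize as well
// as the tail padding behind the body, presumably to keep code objects
// byte-for-byte deterministic (e.g. for snapshots).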
void Code::clear_padding() {
  memset(reinterpret_cast<void*>(address() + kHeaderPaddingStart), 0,
         kHeaderSize - kHeaderPaddingStart);
  Address data_end =
      has_unwinding_info() ? unwinding_info_end() : raw_instruction_end();
  memset(reinterpret_cast<void*>(data_end), 0,
         CodeSize() - (data_end - address()));
}

ByteArray* Code::SourcePositionTable() const {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  return SourcePositionTableWithFrameCache::cast(maybe_table)
      ->source_position_table();
}

uint32_t Code::stub_key() const {
  DCHECK(is_stub());
  return READ_UINT32_FIELD(this, kStubKeyOffset);
}

void Code::set_stub_key(uint32_t key) {
  DCHECK(is_stub() || key == 0);  // Allow zero initialization.
  WRITE_UINT32_FIELD(this, kStubKeyOffset, key);
}

Object* Code::next_code_link() const {
  return code_data_container()->next_code_link();
}

void Code::set_next_code_link(Object* value) {
  code_data_container()->set_next_code_link(value);
}

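// With V8_EMBEDDED_BUILTINS, the Instruction{Size,Start,End} accessors
// redirect to the off-heap instruction stream of embedded builtins, while
// the raw_* variants always describe the on-heap instruction area.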
int Code::InstructionSize() const {
#ifdef V8_EMBEDDED_BUILTINS
  if (Builtins::IsEmbeddedBuiltin(this)) return OffHeapInstructionSize();
#endif
  return raw_instruction_size();
}

Address Code::raw_instruction_start() const {
  return FIELD_ADDR_CONST(this, kHeaderSize);
}

Address Code::InstructionStart() const {
#ifdef V8_EMBEDDED_BUILTINS
  if (Builtins::IsEmbeddedBuiltin(this)) return OffHeapInstructionStart();
#endif
  return raw_instruction_start();
}

Address Code::raw_instruction_end() const {
  return raw_instruction_start() + raw_instruction_size();
}

Address Code::InstructionEnd() const {
#ifdef V8_EMBEDDED_BUILTINS
  if (Builtins::IsEmbeddedBuiltin(this)) return OffHeapInstructionEnd();
#endif
  return raw_instruction_end();
}

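// The unwinding info, if present, is laid out behind the instruction area;
// a sketch inferred from the accessors below:
//
//   +-------------------------------+ <-- raw_instruction_start()
//   |         instructions          |
//   |  padding to 8-byte alignment  |
//   +-------------------------------+ <-- GetUnwindingInfoSizeOffset()
//   | uint64_t unwinding_info_size  |
//   +-------------------------------+ <-- unwinding_info_start()
//   |        unwinding info         |
//   +-------------------------------+ <-- unwinding_info_end()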
int Code::GetUnwindingInfoSizeOffset() const {
  DCHECK(has_unwinding_info());
  return RoundUp(kHeaderSize + raw_instruction_size(), kInt64Size);
}

int Code::unwinding_info_size() const {
  DCHECK(has_unwinding_info());
  return static_cast<int>(
      READ_UINT64_FIELD(this, GetUnwindingInfoSizeOffset()));
}

void Code::set_unwinding_info_size(int value) {
  DCHECK(has_unwinding_info());
  WRITE_UINT64_FIELD(this, GetUnwindingInfoSizeOffset(), value);
}

Address Code::unwinding_info_start() const {
  DCHECK(has_unwinding_info());
  return FIELD_ADDR_CONST(this, GetUnwindingInfoSizeOffset()) + kInt64Size;
}

Address Code::unwinding_info_end() const {
  DCHECK(has_unwinding_info());
  return unwinding_info_start() + unwinding_info_size();
}

int Code::body_size() const {
  int unpadded_body_size =
      has_unwinding_info()
          ? static_cast<int>(unwinding_info_end() - raw_instruction_start())
          : raw_instruction_size();
  return RoundUp(unpadded_body_size, kObjectAlignment);
}

int Code::SizeIncludingMetadata() const {
  int size = CodeSize();
  size += relocation_info()->Size();
  size += deoptimization_data()->Size();
  return size;
}

ByteArray* Code::unchecked_relocation_info() const {
  return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}

byte* Code::relocation_start() const {
  return unchecked_relocation_info()->GetDataStartAddress();
}

byte* Code::relocation_end() const {
  return unchecked_relocation_info()->GetDataStartAddress() +
         unchecked_relocation_info()->length();
}

int Code::relocation_size() const {
  return unchecked_relocation_info()->length();
}

Address Code::entry() const { return raw_instruction_start(); }

bool Code::contains(Address inner_pointer) {
  return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}

int Code::ExecutableSize() const {
  // Check that the assumptions about the layout of the code object hold.
  DCHECK_EQ(static_cast<int>(raw_instruction_start() - address()),
            Code::kHeaderSize);
  return raw_instruction_size() + Code::kHeaderSize;
}

int Code::CodeSize() const { return SizeFor(body_size()); }

Code::Kind Code::kind() const {
  return KindField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

void Code::initialize_flags(Kind kind, bool has_unwinding_info,
                            bool is_turbofanned, int stack_slots) {
  CHECK(0 <= stack_slots && stack_slots < StackSlotsField::kMax);
  static_assert(Code::NUMBER_OF_KINDS <= KindField::kMax + 1, "field overflow");
  uint32_t flags = HasUnwindingInfoField::encode(has_unwinding_info) |
                   KindField::encode(kind) |
                   IsTurbofannedField::encode(is_turbofanned) |
                   StackSlotsField::encode(stack_slots);
  WRITE_UINT32_FIELD(this, kFlagsOffset, flags);
  DCHECK_IMPLIES(stack_slots != 0, has_safepoint_info());
}

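// Note: the entry trampoline is matched via builtin_index() rather than by
// pointer identity, which presumably also covers copies of the trampoline
// code object.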
inline bool Code::is_interpreter_trampoline_builtin() const {
  Builtins* builtins = GetIsolate()->builtins();
  Code* interpreter_entry_trampoline =
      builtins->builtin(Builtins::kInterpreterEntryTrampoline);
  bool is_interpreter_trampoline =
      (builtin_index() == interpreter_entry_trampoline->builtin_index() ||
       this == builtins->builtin(Builtins::kInterpreterEnterBytecodeAdvance) ||
       this == builtins->builtin(Builtins::kInterpreterEnterBytecodeDispatch));
  DCHECK_IMPLIES(is_interpreter_trampoline, !Builtins::IsLazy(builtin_index()));
  return is_interpreter_trampoline;
}

inline bool Code::checks_optimization_marker() const {
  Builtins* builtins = GetIsolate()->builtins();
  Code* interpreter_entry_trampoline =
      builtins->builtin(Builtins::kInterpreterEntryTrampoline);
  bool checks_marker =
      (this == builtins->builtin(Builtins::kCompileLazy) ||
       builtin_index() == interpreter_entry_trampoline->builtin_index());
  DCHECK_IMPLIES(checks_marker, !Builtins::IsLazy(builtin_index()));
  return checks_marker ||
         (kind() == OPTIMIZED_FUNCTION && marked_for_deoptimization());
}

inline bool Code::has_tagged_params() const {
  return kind() != JS_TO_WASM_FUNCTION && kind() != C_WASM_ENTRY &&
         kind() != WASM_FUNCTION;
}

inline bool Code::has_unwinding_info() const {
  return HasUnwindingInfoField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

inline bool Code::is_turbofanned() const {
  return IsTurbofannedField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

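// The following flag accessors are backed by the kind_specific_flags word of
// the CodeDataContainer, so they can be flipped without writing to the Code
// object itself.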
inline bool Code::can_have_weak_objects() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int flags = code_data_container()->kind_specific_flags();
  return CanHaveWeakObjectsField::decode(flags);
}

inline void Code::set_can_have_weak_objects(bool value) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int previous = code_data_container()->kind_specific_flags();
  int updated = CanHaveWeakObjectsField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_construct_stub() const {
  DCHECK(kind() == BUILTIN);
  int flags = code_data_container()->kind_specific_flags();
  return IsConstructStubField::decode(flags);
}

inline void Code::set_is_construct_stub(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = code_data_container()->kind_specific_flags();
  int updated = IsConstructStubField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_promise_rejection() const {
  DCHECK(kind() == BUILTIN);
  int flags = code_data_container()->kind_specific_flags();
  return IsPromiseRejectionField::decode(flags);
}

inline void Code::set_is_promise_rejection(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = code_data_container()->kind_specific_flags();
  int updated = IsPromiseRejectionField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline bool Code::is_exception_caught() const {
  DCHECK(kind() == BUILTIN);
  int flags = code_data_container()->kind_specific_flags();
  return IsExceptionCaughtField::decode(flags);
}

inline void Code::set_is_exception_caught(bool value) {
  DCHECK(kind() == BUILTIN);
  int previous = code_data_container()->kind_specific_flags();
  int updated = IsExceptionCaughtField::update(previous, value);
  code_data_container()->set_kind_specific_flags(updated);
}

inline HandlerTable::CatchPrediction Code::GetBuiltinCatchPrediction() {
  if (is_promise_rejection()) return HandlerTable::PROMISE;
  if (is_exception_caught()) return HandlerTable::CAUGHT;
  return HandlerTable::UNCAUGHT;
}

int Code::builtin_index() const {
  int index = READ_INT_FIELD(this, kBuiltinIndexOffset);
  DCHECK(index == -1 || Builtins::IsBuiltinId(index));
  return index;
}

void Code::set_builtin_index(int index) {
  DCHECK(index == -1 || Builtins::IsBuiltinId(index));
  WRITE_INT_FIELD(this, kBuiltinIndexOffset, index);
}

bool Code::is_builtin() const { return builtin_index() != -1; }

bool Code::has_safepoint_info() const {
  return is_turbofanned() || is_wasm_code();
}

int Code::stack_slots() const {
  DCHECK(has_safepoint_info());
  return StackSlotsField::decode(READ_UINT32_FIELD(this, kFlagsOffset));
}

int Code::safepoint_table_offset() const {
  DCHECK(has_safepoint_info());
  return READ_INT32_FIELD(this, kSafepointTableOffsetOffset);
}

void Code::set_safepoint_table_offset(int offset) {
  CHECK_LE(0, offset);
  DCHECK(has_safepoint_info() || offset == 0);  // Allow zero initialization.
  DCHECK(IsAligned(offset, static_cast<unsigned>(kIntSize)));
  WRITE_INT32_FIELD(this, kSafepointTableOffsetOffset, offset);
}

bool Code::marked_for_deoptimization() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int flags = code_data_container()->kind_specific_flags();
  return MarkedForDeoptimizationField::decode(flags);
}

void Code::set_marked_for_deoptimization(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = code_data_container()->kind_specific_flags();
  int updated = MarkedForDeoptimizationField::update(previous, flag);
  code_data_container()->set_kind_specific_flags(updated);
}

bool Code::deopt_already_counted() const {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  int flags = code_data_container()->kind_specific_flags();
  return DeoptAlreadyCountedField::decode(flags);
}

void Code::set_deopt_already_counted(bool flag) {
  DCHECK(kind() == OPTIMIZED_FUNCTION);
  DCHECK_IMPLIES(flag, AllowDeoptimization::IsAllowed(GetIsolate()));
  int previous = code_data_container()->kind_specific_flags();
  int updated = DeoptAlreadyCountedField::update(previous, flag);
  code_data_container()->set_kind_specific_flags(updated);
}

bool Code::is_stub() const { return kind() == STUB; }
bool Code::is_optimized_code() const { return kind() == OPTIMIZED_FUNCTION; }
bool Code::is_wasm_code() const { return kind() == WASM_FUNCTION; }

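// When FLAG_enable_embedded_constant_pool is off, the offset degenerates to
// InstructionSize(), and constant_pool() below reports kNullAddress.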
int Code::constant_pool_offset() const {
  if (!FLAG_enable_embedded_constant_pool) return InstructionSize();
  return READ_INT_FIELD(this, kConstantPoolOffset);
}

void Code::set_constant_pool_offset(int value) {
  if (!FLAG_enable_embedded_constant_pool) return;
  WRITE_INT_FIELD(this, kConstantPoolOffset, value);
}

Address Code::constant_pool() const {
  if (FLAG_enable_embedded_constant_pool) {
    int offset = constant_pool_offset();
    if (offset < InstructionSize()) {
      return InstructionStart() + offset;
    }
  }
  return kNullAddress;
}

Code* Code::GetCodeFromTargetAddress(Address address) {
  HeapObject* code = HeapObject::FromAddress(address - Code::kHeaderSize);
  // GetCodeFromTargetAddress might be called when marking objects during
  // mark-sweep. reinterpret_cast is therefore used instead of the more
  // appropriate Code::cast, which does not work when the object's map is
  // marked.
  Code* result = reinterpret_cast<Code*>(code);
  return result;
}

Object* Code::GetObjectFromCodeEntry(Address code_entry) {
  return HeapObject::FromAddress(code_entry - Code::kHeaderSize);
}

Object* Code::GetObjectFromEntryAddress(Address location_of_address) {
  return GetObjectFromCodeEntry(Memory::Address_at(location_of_address));
}

bool Code::CanContainWeakObjects() {
  return is_optimized_code() && can_have_weak_objects();
}

bool Code::IsWeakObject(Object* object) {
  return (CanContainWeakObjects() && IsWeakObjectInOptimizedCode(object));
}

bool Code::IsWeakObjectInOptimizedCode(Object* object) {
  if (object->IsMap()) {
    return Map::cast(object)->CanTransition();
  }
  if (object->IsCell()) {
    object = Cell::cast(object)->value();
  } else if (object->IsPropertyCell()) {
    object = PropertyCell::cast(object)->value();
  }
  if (object->IsJSReceiver() || object->IsContext()) {
    return true;
  }
  return false;
}

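// The CodeDataContainer carries the mutable state associated with a Code
// object (kind-specific flags and the next_code_link), presumably so that
// the Code object itself can remain unmodified after initialization.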
INT_ACCESSORS(CodeDataContainer, kind_specific_flags, kKindSpecificFlagsOffset)
ACCESSORS(CodeDataContainer, next_code_link, Object, kNextCodeLinkOffset)

void CodeDataContainer::clear_padding() {
  memset(reinterpret_cast<void*>(address() + kUnalignedSize), 0,
         kSize - kUnalignedSize);
}

byte BytecodeArray::get(int index) {
  DCHECK(index >= 0 && index < this->length());
  return READ_BYTE_FIELD(this, kHeaderSize + index * kCharSize);
}

void BytecodeArray::set(int index, byte value) {
  DCHECK(index >= 0 && index < this->length());
  WRITE_BYTE_FIELD(this, kHeaderSize + index * kCharSize, value);
}

void BytecodeArray::set_frame_size(int frame_size) {
  DCHECK_GE(frame_size, 0);
  DCHECK(IsAligned(frame_size, static_cast<unsigned>(kPointerSize)));
  WRITE_INT_FIELD(this, kFrameSizeOffset, frame_size);
}

int BytecodeArray::frame_size() const {
  return READ_INT_FIELD(this, kFrameSizeOffset);
}

int BytecodeArray::register_count() const {
  return frame_size() / kPointerSize;
}

void BytecodeArray::set_parameter_count(int number_of_parameters) {
  DCHECK_GE(number_of_parameters, 0);
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  WRITE_INT_FIELD(this, kParameterSizeOffset,
                  (number_of_parameters << kPointerSizeLog2));
}

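// A register operand of 0 serves as the sentinel for "no incoming new.target
// or generator register"; 0 is never a valid operand here (see the DCHECK_NE
// in the setter below).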
interpreter::Register BytecodeArray::incoming_new_target_or_generator_register()
    const {
  int register_operand =
      READ_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset);
  if (register_operand == 0) {
    return interpreter::Register::invalid_value();
  } else {
    return interpreter::Register::FromOperand(register_operand);
  }
}

void BytecodeArray::set_incoming_new_target_or_generator_register(
    interpreter::Register incoming_new_target_or_generator_register) {
  if (!incoming_new_target_or_generator_register.is_valid()) {
    WRITE_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset, 0);
  } else {
    DCHECK(incoming_new_target_or_generator_register.index() <
           register_count());
    DCHECK_NE(0, incoming_new_target_or_generator_register.ToOperand());
    WRITE_INT_FIELD(this, kIncomingNewTargetOrGeneratorRegisterOffset,
                    incoming_new_target_or_generator_register.ToOperand());
  }
}

int BytecodeArray::interrupt_budget() const {
  return READ_INT_FIELD(this, kInterruptBudgetOffset);
}

void BytecodeArray::set_interrupt_budget(int interrupt_budget) {
  DCHECK_GE(interrupt_budget, 0);
  WRITE_INT_FIELD(this, kInterruptBudgetOffset, interrupt_budget);
}

int BytecodeArray::osr_loop_nesting_level() const {
  return READ_INT8_FIELD(this, kOSRNestingLevelOffset);
}

void BytecodeArray::set_osr_loop_nesting_level(int depth) {
  DCHECK(0 <= depth && depth <= AbstractCode::kMaxLoopNestingMarker);
  STATIC_ASSERT(AbstractCode::kMaxLoopNestingMarker < kMaxInt8);
  WRITE_INT8_FIELD(this, kOSRNestingLevelOffset, depth);
}

BytecodeArray::Age BytecodeArray::bytecode_age() const {
  // Bytecode is aged by the concurrent marker.
  return static_cast<Age>(RELAXED_READ_INT8_FIELD(this, kBytecodeAgeOffset));
}

void BytecodeArray::set_bytecode_age(BytecodeArray::Age age) {
  DCHECK_GE(age, kFirstBytecodeAge);
  DCHECK_LE(age, kLastBytecodeAge);
  STATIC_ASSERT(kLastBytecodeAge <= kMaxInt8);
  // Bytecode is aged by the concurrent marker.
  RELAXED_WRITE_INT8_FIELD(this, kBytecodeAgeOffset, static_cast<int8_t>(age));
}

int BytecodeArray::parameter_count() const {
  // Parameter count is stored as the size on stack of the parameters to allow
  // it to be used directly by generated code.
  return READ_INT_FIELD(this, kParameterSizeOffset) >> kPointerSizeLog2;
}

ACCESSORS(BytecodeArray, constant_pool, FixedArray, kConstantPoolOffset)
ACCESSORS(BytecodeArray, handler_table, ByteArray, kHandlerTableOffset)
ACCESSORS(BytecodeArray, source_position_table, Object,
          kSourcePositionTableOffset)

void BytecodeArray::clear_padding() {
  int data_size = kHeaderSize + length();
  memset(reinterpret_cast<void*>(address() + data_size), 0,
         SizeFor(length()) - data_size);
}

Address BytecodeArray::GetFirstBytecodeAddress() {
  return reinterpret_cast<Address>(this) - kHeapObjectTag + kHeaderSize;
}

ByteArray* BytecodeArray::SourcePositionTable() {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  return SourcePositionTableWithFrameCache::cast(maybe_table)
      ->source_position_table();
}

void BytecodeArray::ClearFrameCacheFromSourcePositionTable() {
  Object* maybe_table = source_position_table();
  if (maybe_table->IsByteArray()) return;
  DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
  set_source_position_table(SourcePositionTableWithFrameCache::cast(maybe_table)
                                ->source_position_table());
}

int BytecodeArray::BytecodeArraySize() { return SizeFor(this->length()); }

int BytecodeArray::SizeIncludingMetadata() {
  int size = BytecodeArraySize();
  size += constant_pool()->Size();
  size += handler_table()->Size();
  size += SourcePositionTable()->Size();
  return size;
}

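// DeoptimizationData is a FixedArray with a fixed-size header of
// kFirstDeoptEntryIndex slots followed by DeoptCount() entries of
// kDeoptEntrySize slots each.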
BailoutId DeoptimizationData::BytecodeOffset(int i) {
  return BailoutId(BytecodeOffsetRaw(i)->value());
}

void DeoptimizationData::SetBytecodeOffset(int i, BailoutId value) {
  SetBytecodeOffsetRaw(i, Smi::FromInt(value.ToInt()));
}

int DeoptimizationData::DeoptCount() {
  return (length() - kFirstDeoptEntryIndex) / kDeoptEntrySize;
}

}  // namespace internal
}  // namespace v8

#include "src/objects/object-macros-undef.h"

#endif  // V8_OBJECTS_CODE_INL_H_