Commit b7a9c022 authored by Camillo Bruni's avatar Camillo Bruni Committed by Commit Bot

[cleanup] Introduce more const methods

Change-Id: I703c90cfbb91f4afc035fb32a44f33af9fe736f3
Reviewed-on: https://chromium-review.googlesource.com/548459
Commit-Queue: Camillo Bruni <cbruni@chromium.org>
Reviewed-by: 's avatarMichael Lippautz <mlippautz@chromium.org>
Reviewed-by: 's avatarJakob Kummerow <jkummerow@chromium.org>
Cr-Commit-Position: refs/heads/master@{#46392}
parent 9cb2211c
......@@ -11,8 +11,7 @@
namespace v8 {
namespace internal {
inline FieldIndex FieldIndex::ForInObjectOffset(int offset, Map* map) {
inline FieldIndex FieldIndex::ForInObjectOffset(int offset, const Map* map) {
DCHECK((offset % kPointerSize) == 0);
int index = offset / kPointerSize;
DCHECK(map == NULL ||
......@@ -21,8 +20,7 @@ inline FieldIndex FieldIndex::ForInObjectOffset(int offset, Map* map) {
return FieldIndex(true, index, false, 0, 0, true);
}
inline FieldIndex FieldIndex::ForPropertyIndex(Map* map,
inline FieldIndex FieldIndex::ForPropertyIndex(const Map* map,
int property_index,
bool is_double) {
DCHECK(map->instance_type() >= FIRST_NONSTRING_TYPE);
......@@ -42,7 +40,8 @@ inline FieldIndex FieldIndex::ForPropertyIndex(Map* map,
// Takes an index as computed by GetLoadByFieldIndex and reconstructs a
// FieldIndex object from it.
inline FieldIndex FieldIndex::ForLoadByFieldIndex(Map* map, int orig_index) {
inline FieldIndex FieldIndex::ForLoadByFieldIndex(const Map* map,
int orig_index) {
int field_index = orig_index;
bool is_inobject = true;
bool is_double = field_index & 1;
......@@ -85,7 +84,8 @@ inline int FieldIndex::GetLoadByFieldIndex() const {
return is_double() ? (result | 1) : result;
}
inline FieldIndex FieldIndex::ForDescriptor(Map* map, int descriptor_index) {
inline FieldIndex FieldIndex::ForDescriptor(const Map* map,
int descriptor_index) {
PropertyDetails details =
map->instance_descriptors()->GetDetails(descriptor_index);
int field_index = details.field_index();
......
......@@ -21,12 +21,11 @@ class FieldIndex final {
public:
FieldIndex() : bit_field_(0) {}
static FieldIndex ForPropertyIndex(Map* map,
int index,
static FieldIndex ForPropertyIndex(const Map* map, int index,
bool is_double = false);
static FieldIndex ForInObjectOffset(int offset, Map* map = NULL);
static FieldIndex ForDescriptor(Map* map, int descriptor_index);
static FieldIndex ForLoadByFieldIndex(Map* map, int index);
static FieldIndex ForInObjectOffset(int offset, const Map* map = NULL);
static FieldIndex ForDescriptor(const Map* map, int descriptor_index);
static FieldIndex ForLoadByFieldIndex(const Map* map, int index);
static FieldIndex FromFieldAccessStubKey(int key);
int GetLoadByFieldIndex() const;
......
......@@ -3088,8 +3088,7 @@ Heap::RootListIndex Heap::RootIndexForEmptyFixedTypedArray(
}
}
FixedTypedArrayBase* Heap::EmptyFixedTypedArrayForMap(Map* map) {
FixedTypedArrayBase* Heap::EmptyFixedTypedArrayForMap(const Map* map) {
return FixedTypedArrayBase::cast(
roots_[RootIndexForEmptyFixedTypedArray(map->elements_kind())]);
}
......
......@@ -1115,7 +1115,7 @@ class Heap {
RootListIndex RootIndexForFixedTypedArray(ExternalArrayType array_type);
RootListIndex RootIndexForEmptyFixedTypedArray(ElementsKind kind);
FixedTypedArrayBase* EmptyFixedTypedArrayForMap(Map* map);
FixedTypedArrayBase* EmptyFixedTypedArrayForMap(const Map* map);
void RegisterStrongRoots(Object** start, Object** end);
void UnregisterStrongRoots(Object** start);
......
......@@ -719,7 +719,7 @@ bool Object::FitsRepresentation(Representation representation) {
return true;
}
bool Object::ToUint32(uint32_t* value) {
bool Object::ToUint32(uint32_t* value) const {
if (IsSmi()) {
int num = Smi::cast(this)->value();
if (num < 0) return false;
......@@ -908,10 +908,7 @@ MapWord MapWord::FromMap(const Map* map) {
return MapWord(reinterpret_cast<uintptr_t>(map));
}
Map* MapWord::ToMap() {
return reinterpret_cast<Map*>(value_);
}
Map* MapWord::ToMap() const { return reinterpret_cast<Map*>(value_); }
bool MapWord::IsForwardingAddress() const {
return HAS_SMI_TAG(reinterpret_cast<Object*>(value_));
......@@ -973,8 +970,7 @@ void HeapObject::set_map(Map* value) {
}
}
Map* HeapObject::synchronized_map() {
Map* HeapObject::synchronized_map() const {
return synchronized_map_word().ToMap();
}
......@@ -1041,11 +1037,7 @@ void HeapObject::synchronized_set_map_word(MapWord map_word) {
this, kMapOffset, reinterpret_cast<Object*>(map_word.value_));
}
int HeapObject::Size() {
return SizeFromMap(map());
}
int HeapObject::Size() const { return SizeFromMap(map()); }
double HeapNumber::value() const {
return READ_DOUBLE_FIELD(this, kValueOffset);
......@@ -1563,7 +1555,8 @@ inline bool IsSpecialReceiverInstanceType(InstanceType instance_type) {
return instance_type <= LAST_SPECIAL_RECEIVER_TYPE;
}
int JSObject::GetEmbedderFieldCount(Map* map) {
// static
int JSObject::GetEmbedderFieldCount(const Map* map) {
int instance_size = map->instance_size();
if (instance_size == kVariableSizeSentinel) return 0;
InstanceType instance_type = map->instance_type();
......@@ -1571,7 +1564,9 @@ int JSObject::GetEmbedderFieldCount(Map* map) {
map->GetInObjectProperties();
}
int JSObject::GetEmbedderFieldCount() { return GetEmbedderFieldCount(map()); }
int JSObject::GetEmbedderFieldCount() const {
return GetEmbedderFieldCount(map());
}
int JSObject::GetEmbedderFieldOffset(int index) {
DCHECK(index < GetEmbedderFieldCount() && index >= 0);
......@@ -1611,8 +1606,7 @@ bool JSObject::IsUnboxedDoubleField(FieldIndex index) {
return map()->IsUnboxedDoubleField(index);
}
bool Map::IsUnboxedDoubleField(FieldIndex index) {
bool Map::IsUnboxedDoubleField(FieldIndex index) const {
if (!FLAG_unbox_double_fields) return false;
if (index.is_hidden_field() || !index.is_inobject()) return false;
return !layout_descriptor()->IsTagged(index.property_index());
......@@ -1748,8 +1742,7 @@ void JSObject::InitializeBody(Map* map, int start_offset,
}
}
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) {
bool Map::TooManyFastProperties(StoreFromKeyed store_mode) const {
if (unused_property_fields() != 0) return false;
if (is_prototype_map()) return false;
int minimum = store_mode == CERTAINLY_NOT_STORE_FROM_KEYED ? 128 : 12;
......@@ -1766,10 +1759,11 @@ void Struct::InitializeBody(int object_size) {
}
}
bool Object::ToArrayLength(uint32_t* index) { return Object::ToUint32(index); }
bool Object::ToArrayLength(uint32_t* index) const {
return Object::ToUint32(index);
}
bool Object::ToArrayIndex(uint32_t* index) {
bool Object::ToArrayIndex(uint32_t* index) const {
return Object::ToUint32(index) && *index != kMaxUInt32;
}
......@@ -2034,8 +2028,7 @@ WriteBarrierMode HeapObject::GetWriteBarrierMode(
return UPDATE_WRITE_BARRIER;
}
AllocationAlignment HeapObject::RequiredAlignment() {
AllocationAlignment HeapObject::RequiredAlignment() const {
#ifdef V8_HOST_ARCH_32_BIT
if ((IsFixedFloat64Array() || IsFixedDoubleArray()) &&
FixedArrayBase::cast(this)->length() != 0) {
......@@ -2301,19 +2294,17 @@ int DescriptorArray::SearchWithCache(Isolate* isolate, Name* name, Map* map) {
return number;
}
PropertyDetails Map::GetLastDescriptorDetails() {
PropertyDetails Map::GetLastDescriptorDetails() const {
return instance_descriptors()->GetDetails(LastAdded());
}
int Map::LastAdded() {
int Map::LastAdded() const {
int number_of_own_descriptors = NumberOfOwnDescriptors();
DCHECK(number_of_own_descriptors > 0);
return number_of_own_descriptors - 1;
}
int Map::NumberOfOwnDescriptors() {
int Map::NumberOfOwnDescriptors() const {
return NumberOfOwnDescriptorsBits::decode(bit_field3());
}
......@@ -2323,9 +2314,7 @@ void Map::SetNumberOfOwnDescriptors(int number) {
set_bit_field3(NumberOfOwnDescriptorsBits::update(bit_field3(), number));
}
int Map::EnumLength() { return EnumLengthBits::decode(bit_field3()); }
int Map::EnumLength() const { return EnumLengthBits::decode(bit_field3()); }
void Map::SetEnumLength(int length) {
if (length != kInvalidEnumCacheSentinel) {
......@@ -2336,8 +2325,7 @@ void Map::SetEnumLength(int length) {
set_bit_field3(EnumLengthBits::update(bit_field3(), length));
}
FixedArrayBase* Map::GetInitialElements() {
FixedArrayBase* Map::GetInitialElements() const {
FixedArrayBase* result = nullptr;
if (has_fast_elements() || has_fast_string_wrapper_elements()) {
result = GetHeap()->empty_fixed_array();
......@@ -2479,18 +2467,15 @@ void DescriptorArray::SwapSortedKeys(int first, int second) {
SetSortedKey(second, first_key);
}
int HashTableBase::NumberOfElements() {
int HashTableBase::NumberOfElements() const {
return Smi::cast(get(kNumberOfElementsIndex))->value();
}
int HashTableBase::NumberOfDeletedElements() {
int HashTableBase::NumberOfDeletedElements() const {
return Smi::cast(get(kNumberOfDeletedElementsIndex))->value();
}
int HashTableBase::Capacity() {
int HashTableBase::Capacity() const {
return Smi::cast(get(kCapacityIndex))->value();
}
......@@ -3004,28 +2989,24 @@ int FixedTypedArrayBase::ElementSize(InstanceType type) {
return element_size;
}
int FixedTypedArrayBase::DataSize(InstanceType type) {
int FixedTypedArrayBase::DataSize(InstanceType type) const {
if (base_pointer() == Smi::kZero) return 0;
return length() * ElementSize(type);
}
int FixedTypedArrayBase::DataSize() {
int FixedTypedArrayBase::DataSize() const {
return DataSize(map()->instance_type());
}
int FixedTypedArrayBase::size() {
int FixedTypedArrayBase::size() const {
return OBJECT_POINTER_ALIGN(kDataOffset + DataSize());
}
int FixedTypedArrayBase::TypedArraySize(InstanceType type) {
int FixedTypedArrayBase::TypedArraySize(InstanceType type) const {
return OBJECT_POINTER_ALIGN(kDataOffset + DataSize(type));
}
// static
int FixedTypedArrayBase::TypedArraySize(InstanceType type, int length) {
return OBJECT_POINTER_ALIGN(kDataOffset + length * ElementSize(type));
}
......@@ -3195,38 +3176,33 @@ Handle<Object> Float64ArrayTraits::ToHandle(Isolate* isolate, double scalar) {
return isolate->factory()->NewNumber(scalar);
}
int Map::visitor_id() {
return READ_BYTE_FIELD(this, kVisitorIdOffset);
}
int Map::visitor_id() const { return READ_BYTE_FIELD(this, kVisitorIdOffset); }
void Map::set_visitor_id(int id) {
DCHECK(0 <= id && id < 256);
DCHECK_LE(0, id);
DCHECK_LT(id, 256);
WRITE_BYTE_FIELD(this, kVisitorIdOffset, static_cast<byte>(id));
}
int Map::instance_size() {
int Map::instance_size() const {
return RELAXED_READ_BYTE_FIELD(this, kInstanceSizeOffset) << kPointerSizeLog2;
}
int Map::inobject_properties_or_constructor_function_index() {
int Map::inobject_properties_or_constructor_function_index() const {
return RELAXED_READ_BYTE_FIELD(
this, kInObjectPropertiesOrConstructorFunctionIndexOffset);
}
void Map::set_inobject_properties_or_constructor_function_index(int value) {
DCHECK(0 <= value && value < 256);
DCHECK_LE(0, value);
DCHECK_LT(value, 256);
RELAXED_WRITE_BYTE_FIELD(this,
kInObjectPropertiesOrConstructorFunctionIndexOffset,
static_cast<byte>(value));
}
int Map::GetInObjectProperties() {
int Map::GetInObjectProperties() const {
DCHECK(IsJSObjectMap());
return inobject_properties_or_constructor_function_index();
}
......@@ -3237,8 +3213,7 @@ void Map::SetInObjectProperties(int value) {
set_inobject_properties_or_constructor_function_index(value);
}
int Map::GetConstructorFunctionIndex() {
int Map::GetConstructorFunctionIndex() const {
DCHECK(IsPrimitiveMap());
return inobject_properties_or_constructor_function_index();
}
......@@ -3249,8 +3224,7 @@ void Map::SetConstructorFunctionIndex(int value) {
set_inobject_properties_or_constructor_function_index(value);
}
int Map::GetInObjectPropertyOffset(int index) {
int Map::GetInObjectPropertyOffset(int index) const {
// Adjust for the number of properties stored in the object.
index -= GetInObjectProperties();
DCHECK(index <= 0);
......@@ -3264,8 +3238,7 @@ Handle<Map> Map::AddMissingTransitionsForTesting(
return AddMissingTransitions(split_map, descriptors, full_layout_descriptor);
}
int HeapObject::SizeFromMap(Map* map) {
int HeapObject::SizeFromMap(Map* map) const {
int instance_size = map->instance_size();
if (instance_size != kVariableSizeSentinel) return instance_size;
// Only inline the most frequent cases.
......@@ -3273,50 +3246,50 @@ int HeapObject::SizeFromMap(Map* map) {
if (instance_type == FIXED_ARRAY_TYPE ||
instance_type == TRANSITION_ARRAY_TYPE) {
return FixedArray::SizeFor(
reinterpret_cast<FixedArray*>(this)->synchronized_length());
reinterpret_cast<const FixedArray*>(this)->synchronized_length());
}
if (instance_type == ONE_BYTE_STRING_TYPE ||
instance_type == ONE_BYTE_INTERNALIZED_STRING_TYPE) {
// Strings may get concurrently truncated, hence we have to access its
// length synchronized.
return SeqOneByteString::SizeFor(
reinterpret_cast<SeqOneByteString*>(this)->synchronized_length());
reinterpret_cast<const SeqOneByteString*>(this)->synchronized_length());
}
if (instance_type == BYTE_ARRAY_TYPE) {
return ByteArray::SizeFor(
reinterpret_cast<ByteArray*>(this)->synchronized_length());
reinterpret_cast<const ByteArray*>(this)->synchronized_length());
}
if (instance_type == BYTECODE_ARRAY_TYPE) {
return BytecodeArray::SizeFor(
reinterpret_cast<BytecodeArray*>(this)->synchronized_length());
reinterpret_cast<const BytecodeArray*>(this)->synchronized_length());
}
if (instance_type == FREE_SPACE_TYPE) {
return reinterpret_cast<FreeSpace*>(this)->relaxed_read_size();
return reinterpret_cast<const FreeSpace*>(this)->relaxed_read_size();
}
if (instance_type == STRING_TYPE ||
instance_type == INTERNALIZED_STRING_TYPE) {
// Strings may get concurrently truncated, hence we have to access its
// length synchronized.
return SeqTwoByteString::SizeFor(
reinterpret_cast<SeqTwoByteString*>(this)->synchronized_length());
reinterpret_cast<const SeqTwoByteString*>(this)->synchronized_length());
}
if (instance_type == FIXED_DOUBLE_ARRAY_TYPE) {
return FixedDoubleArray::SizeFor(
reinterpret_cast<FixedDoubleArray*>(this)->synchronized_length());
reinterpret_cast<const FixedDoubleArray*>(this)->synchronized_length());
}
if (instance_type >= FIRST_FIXED_TYPED_ARRAY_TYPE &&
instance_type <= LAST_FIXED_TYPED_ARRAY_TYPE) {
return reinterpret_cast<FixedTypedArrayBase*>(
this)->TypedArraySize(instance_type);
return reinterpret_cast<const FixedTypedArrayBase*>(this)->TypedArraySize(
instance_type);
}
if (instance_type == SMALL_ORDERED_HASH_SET_TYPE) {
return reinterpret_cast<SmallOrderedHashSet*>(this)->Size();
return reinterpret_cast<const SmallOrderedHashSet*>(this)->Size();
}
if (instance_type == SMALL_ORDERED_HASH_MAP_TYPE) {
return reinterpret_cast<SmallOrderedHashMap*>(this)->Size();
return reinterpret_cast<const SmallOrderedHashMap*>(this)->Size();
}
DCHECK(instance_type == CODE_TYPE);
return reinterpret_cast<Code*>(this)->CodeSize();
return reinterpret_cast<const Code*>(this)->CodeSize();
}
......@@ -3330,8 +3303,7 @@ void Map::set_instance_size(int value) {
void Map::clear_unused() { WRITE_BYTE_FIELD(this, kUnusedOffset, 0); }
InstanceType Map::instance_type() {
InstanceType Map::instance_type() const {
return static_cast<InstanceType>(READ_BYTE_FIELD(this, kInstanceTypeOffset));
}
......@@ -3340,8 +3312,7 @@ void Map::set_instance_type(InstanceType value) {
WRITE_BYTE_FIELD(this, kInstanceTypeOffset, value);
}
int Map::unused_property_fields() {
int Map::unused_property_fields() const {
return READ_BYTE_FIELD(this, kUnusedPropertyFieldsOffset);
}
......@@ -3375,8 +3346,7 @@ void Map::set_non_instance_prototype(bool value) {
}
}
bool Map::has_non_instance_prototype() {
bool Map::has_non_instance_prototype() const {
return ((1 << kHasNonInstancePrototype) & bit_field()) != 0;
}
......@@ -3407,8 +3377,7 @@ void Map::set_has_indexed_interceptor() {
set_bit_field(bit_field() | (1 << kHasIndexedInterceptor));
}
bool Map::has_indexed_interceptor() {
bool Map::has_indexed_interceptor() const {
return ((1 << kHasIndexedInterceptor) & bit_field()) != 0;
}
......@@ -3417,8 +3386,7 @@ void Map::set_is_undetectable() {
set_bit_field(bit_field() | (1 << kIsUndetectable));
}
bool Map::is_undetectable() {
bool Map::is_undetectable() const {
return ((1 << kIsUndetectable) & bit_field()) != 0;
}
......@@ -3427,8 +3395,7 @@ void Map::set_has_named_interceptor() {
set_bit_field(bit_field() | (1 << kHasNamedInterceptor));
}
bool Map::has_named_interceptor() {
bool Map::has_named_interceptor() const {
return ((1 << kHasNamedInterceptor) & bit_field()) != 0;
}
......@@ -3441,8 +3408,7 @@ void Map::set_is_access_check_needed(bool access_check_needed) {
}
}
bool Map::is_access_check_needed() {
bool Map::is_access_check_needed() const {
return ((1 << kIsAccessCheckNeeded) & bit_field()) != 0;
}
......@@ -3455,7 +3421,7 @@ void Map::set_is_extensible(bool value) {
}
}
bool Map::is_extensible() {
bool Map::is_extensible() const {
return ((1 << kIsExtensible) & bit_field2()) != 0;
}
......@@ -3480,44 +3446,47 @@ void Map::set_elements_kind(ElementsKind elements_kind) {
DCHECK(this->elements_kind() == elements_kind);
}
ElementsKind Map::elements_kind() {
ElementsKind Map::elements_kind() const {
return Map::ElementsKindBits::decode(bit_field2());
}
bool Map::has_fast_smi_elements() { return IsSmiElementsKind(elements_kind()); }
bool Map::has_fast_smi_elements() const {
return IsSmiElementsKind(elements_kind());
}
bool Map::has_fast_object_elements() {
bool Map::has_fast_object_elements() const {
return IsObjectElementsKind(elements_kind());
}
bool Map::has_fast_smi_or_object_elements() {
bool Map::has_fast_smi_or_object_elements() const {
return IsSmiOrObjectElementsKind(elements_kind());
}
bool Map::has_fast_double_elements() {
bool Map::has_fast_double_elements() const {
return IsDoubleElementsKind(elements_kind());
}
bool Map::has_fast_elements() { return IsFastElementsKind(elements_kind()); }
bool Map::has_fast_elements() const {
return IsFastElementsKind(elements_kind());
}
bool Map::has_sloppy_arguments_elements() {
bool Map::has_sloppy_arguments_elements() const {
return IsSloppyArgumentsElementsKind(elements_kind());
}
bool Map::has_fast_sloppy_arguments_elements() {
bool Map::has_fast_sloppy_arguments_elements() const {
return elements_kind() == FAST_SLOPPY_ARGUMENTS_ELEMENTS;
}
bool Map::has_fast_string_wrapper_elements() {
bool Map::has_fast_string_wrapper_elements() const {
return elements_kind() == FAST_STRING_WRAPPER_ELEMENTS;
}
bool Map::has_fixed_typed_array_elements() {
bool Map::has_fixed_typed_array_elements() const {
return IsFixedTypedArrayElementsKind(elements_kind());
}
bool Map::has_dictionary_elements() {
bool Map::has_dictionary_elements() const {
return IsDictionaryElementsKind(elements_kind());
}
......@@ -3528,13 +3497,11 @@ void Map::set_dictionary_map(bool value) {
set_bit_field3(new_bit_field3);
}
bool Map::is_dictionary_map() {
bool Map::is_dictionary_map() const {
return DictionaryMap::decode(bit_field3());
}
Code::Flags Code::flags() {
Code::Flags Code::flags() const {
return static_cast<Flags>(READ_INT_FIELD(this, kFlagsOffset));
}
......@@ -3543,8 +3510,7 @@ void Map::set_owns_descriptors(bool owns_descriptors) {
set_bit_field3(OwnsDescriptors::update(bit_field3(), owns_descriptors));
}
bool Map::owns_descriptors() {
bool Map::owns_descriptors() const {
return OwnsDescriptors::decode(bit_field3());
}
......@@ -3561,18 +3527,13 @@ void Map::deprecate() {
set_bit_field3(Deprecated::update(bit_field3(), true));
}
bool Map::is_deprecated() {
return Deprecated::decode(bit_field3());
}
bool Map::is_deprecated() const { return Deprecated::decode(bit_field3()); }
void Map::set_migration_target(bool value) {
set_bit_field3(IsMigrationTarget::update(bit_field3(), value));
}
bool Map::is_migration_target() {
bool Map::is_migration_target() const {
return IsMigrationTarget::decode(bit_field3());
}
......@@ -3580,7 +3541,7 @@ void Map::set_immutable_proto(bool value) {
set_bit_field3(ImmutablePrototype::update(bit_field3(), value));
}
bool Map::is_immutable_proto() {
bool Map::is_immutable_proto() const {
return ImmutablePrototype::decode(bit_field3());
}
......@@ -3588,16 +3549,15 @@ void Map::set_new_target_is_base(bool value) {
set_bit_field3(NewTargetIsBase::update(bit_field3(), value));
}
bool Map::new_target_is_base() { return NewTargetIsBase::decode(bit_field3()); }
bool Map::new_target_is_base() const {
return NewTargetIsBase::decode(bit_field3());
}
void Map::set_construction_counter(int value) {
set_bit_field3(ConstructionCounter::update(bit_field3(), value));
}
int Map::construction_counter() {
int Map::construction_counter() const {
return ConstructionCounter::decode(bit_field3());
}
......@@ -3606,13 +3566,9 @@ void Map::mark_unstable() {
set_bit_field3(IsUnstable::update(bit_field3(), true));
}
bool Map::is_stable() const { return !IsUnstable::decode(bit_field3()); }
bool Map::is_stable() {
return !IsUnstable::decode(bit_field3());
}
bool Map::CanBeDeprecated() {
bool Map::CanBeDeprecated() const {
int descriptor = LastAdded();
for (int i = 0; i <= descriptor; i++) {
PropertyDetails details = instance_descriptors()->GetDetails(i);
......@@ -3637,48 +3593,52 @@ void Map::NotifyLeafMapLayoutChange() {
}
}
bool Map::CanTransition() {
bool Map::CanTransition() const {
// Only JSObject and subtypes have map transitions and back pointers.
STATIC_ASSERT(LAST_TYPE == LAST_JS_OBJECT_TYPE);
return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
bool Map::IsBooleanMap() { return this == GetHeap()->boolean_map(); }
bool Map::IsPrimitiveMap() {
bool Map::IsBooleanMap() const { return this == GetHeap()->boolean_map(); }
bool Map::IsPrimitiveMap() const {
STATIC_ASSERT(FIRST_PRIMITIVE_TYPE == FIRST_TYPE);
return instance_type() <= LAST_PRIMITIVE_TYPE;
}
bool Map::IsJSReceiverMap() {
bool Map::IsJSReceiverMap() const {
STATIC_ASSERT(LAST_JS_RECEIVER_TYPE == LAST_TYPE);
return instance_type() >= FIRST_JS_RECEIVER_TYPE;
}
bool Map::IsJSObjectMap() {
bool Map::IsJSObjectMap() const {
STATIC_ASSERT(LAST_JS_OBJECT_TYPE == LAST_TYPE);
return instance_type() >= FIRST_JS_OBJECT_TYPE;
}
bool Map::IsJSArrayMap() { return instance_type() == JS_ARRAY_TYPE; }
bool Map::IsJSFunctionMap() { return instance_type() == JS_FUNCTION_TYPE; }
bool Map::IsStringMap() { return instance_type() < FIRST_NONSTRING_TYPE; }
bool Map::IsJSProxyMap() { return instance_type() == JS_PROXY_TYPE; }
bool Map::IsJSGlobalProxyMap() {
bool Map::IsJSArrayMap() const { return instance_type() == JS_ARRAY_TYPE; }
bool Map::IsJSFunctionMap() const {
return instance_type() == JS_FUNCTION_TYPE;
}
bool Map::IsStringMap() const { return instance_type() < FIRST_NONSTRING_TYPE; }
bool Map::IsJSProxyMap() const { return instance_type() == JS_PROXY_TYPE; }
bool Map::IsJSGlobalProxyMap() const {
return instance_type() == JS_GLOBAL_PROXY_TYPE;
}
bool Map::IsJSGlobalObjectMap() {
bool Map::IsJSGlobalObjectMap() const {
return instance_type() == JS_GLOBAL_OBJECT_TYPE;
}
bool Map::IsJSTypedArrayMap() { return instance_type() == JS_TYPED_ARRAY_TYPE; }
bool Map::IsJSDataViewMap() { return instance_type() == JS_DATA_VIEW_TYPE; }
bool Map::IsJSTypedArrayMap() const {
return instance_type() == JS_TYPED_ARRAY_TYPE;
}
bool Map::IsJSDataViewMap() const {
return instance_type() == JS_DATA_VIEW_TYPE;
}
bool Map::IsSpecialReceiverMap() {
bool Map::IsSpecialReceiverMap() const {
bool result = IsSpecialReceiverInstanceType(instance_type());
DCHECK_IMPLIES(!result,
!has_named_interceptor() && !is_access_check_needed());
return result;
}
bool Map::CanOmitMapChecks() {
bool Map::CanOmitMapChecks() const {
return is_stable() && FLAG_omit_map_checks_for_leaf_maps;
}
......@@ -3743,12 +3703,9 @@ void Code::set_flags(Code::Flags flags) {
WRITE_INT_FIELD(this, kFlagsOffset, flags);
}
Code::Kind Code::kind() const { return ExtractKindFromFlags(flags()); }
Code::Kind Code::kind() {
return ExtractKindFromFlags(flags());
}
bool Code::IsCodeStubOrIC() {
bool Code::IsCodeStubOrIC() const {
switch (kind()) {
case STUB:
case HANDLER:
......@@ -3761,7 +3718,7 @@ bool Code::IsCodeStubOrIC() {
}
}
ExtraICState Code::extra_ic_state() {
ExtraICState Code::extra_ic_state() const {
DCHECK(is_compare_ic_stub() || is_debug_stub());
return ExtractExtraICStateFromFlags(flags());
}
......@@ -3777,25 +3734,23 @@ void Code::set_raw_kind_specific_flags2(int value) {
WRITE_INT_FIELD(this, kKindSpecificFlags2Offset, value);
}
inline bool Code::is_crankshafted() {
inline bool Code::is_crankshafted() const {
return IsCrankshaftedField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
}
inline bool Code::is_hydrogen_stub() {
inline bool Code::is_hydrogen_stub() const {
return is_crankshafted() && kind() != OPTIMIZED_FUNCTION;
}
inline bool Code::is_interpreter_trampoline_builtin() {
inline bool Code::is_interpreter_trampoline_builtin() const {
Builtins* builtins = GetIsolate()->builtins();
return this == *builtins->InterpreterEntryTrampoline() ||
this == *builtins->InterpreterEnterBytecodeAdvance() ||
this == *builtins->InterpreterEnterBytecodeDispatch();
}
inline bool Code::checks_optimization_marker() {
inline bool Code::checks_optimization_marker() const {
Builtins* builtins = GetIsolate()->builtins();
return this == *builtins->CompileLazy() ||
this == *builtins->InterpreterEntryTrampoline() ||
......@@ -3818,7 +3773,7 @@ inline void Code::set_is_crankshafted(bool value) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
inline bool Code::has_tagged_params() {
inline bool Code::has_tagged_params() const {
int flags = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
return HasTaggedStackField::decode(flags);
}
......@@ -3829,7 +3784,7 @@ inline void Code::set_has_tagged_params(bool value) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
inline bool Code::is_turbofanned() {
inline bool Code::is_turbofanned() const {
return IsTurbofannedField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
}
......@@ -3841,8 +3796,7 @@ inline void Code::set_is_turbofanned(bool value) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
inline bool Code::can_have_weak_objects() {
inline bool Code::can_have_weak_objects() const {
DCHECK(kind() == OPTIMIZED_FUNCTION);
return CanHaveWeakObjectsField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
......@@ -3856,7 +3810,7 @@ inline void Code::set_can_have_weak_objects(bool value) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
inline bool Code::is_construct_stub() {
inline bool Code::is_construct_stub() const {
DCHECK(kind() == BUILTIN);
return IsConstructStubField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
......@@ -3869,7 +3823,7 @@ inline void Code::set_is_construct_stub(bool value) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
inline bool Code::is_promise_rejection() {
inline bool Code::is_promise_rejection() const {
DCHECK(kind() == BUILTIN);
return IsPromiseRejectionField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
......@@ -3882,7 +3836,7 @@ inline void Code::set_is_promise_rejection(bool value) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
inline bool Code::is_exception_caught() {
inline bool Code::is_exception_caught() const {
DCHECK(kind() == BUILTIN);
return IsExceptionCaughtField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
......@@ -3901,8 +3855,7 @@ inline HandlerTable::CatchPrediction Code::GetBuiltinCatchPrediction() {
return HandlerTable::UNCAUGHT;
}
bool Code::has_debug_break_slots() {
bool Code::has_debug_break_slots() const {
DCHECK_EQ(FUNCTION, kind());
unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasDebugBreakSlotsField::decode(flags);
......@@ -3916,8 +3869,7 @@ void Code::set_has_debug_break_slots(bool value) {
WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}
bool Code::has_reloc_info_for_serialization() {
bool Code::has_reloc_info_for_serialization() const {
DCHECK_EQ(FUNCTION, kind());
unsigned flags = READ_UINT32_FIELD(this, kFullCodeFlags);
return FullCodeFlagsHasRelocInfoForSerialization::decode(flags);
......@@ -3931,8 +3883,7 @@ void Code::set_has_reloc_info_for_serialization(bool value) {
WRITE_UINT32_FIELD(this, kFullCodeFlags, flags);
}
int Code::allow_osr_at_loop_nesting_level() {
int Code::allow_osr_at_loop_nesting_level() const {
DCHECK_EQ(FUNCTION, kind());
int fields = READ_UINT32_FIELD(this, kKindSpecificFlags2Offset);
return AllowOSRAtLoopNestingLevelField::decode(fields);
......@@ -3947,15 +3898,15 @@ void Code::set_allow_osr_at_loop_nesting_level(int level) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
int Code::builtin_index() { return READ_INT_FIELD(this, kBuiltinIndexOffset); }
int Code::builtin_index() const {
return READ_INT_FIELD(this, kBuiltinIndexOffset);
}
void Code::set_builtin_index(int index) {
WRITE_INT_FIELD(this, kBuiltinIndexOffset, index);
}
unsigned Code::stack_slots() {
unsigned Code::stack_slots() const {
DCHECK(is_crankshafted());
return StackSlotsField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
......@@ -3970,8 +3921,7 @@ void Code::set_stack_slots(unsigned slots) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
unsigned Code::safepoint_table_offset() {
unsigned Code::safepoint_table_offset() const {
DCHECK(is_crankshafted());
return SafepointTableOffsetField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags2Offset));
......@@ -3987,8 +3937,7 @@ void Code::set_safepoint_table_offset(unsigned offset) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
unsigned Code::back_edge_table_offset() {
unsigned Code::back_edge_table_offset() const {
DCHECK_EQ(FUNCTION, kind());
return BackEdgeTableOffsetField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags2Offset)) << kPointerSizeLog2;
......@@ -4004,8 +3953,7 @@ void Code::set_back_edge_table_offset(unsigned offset) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags2Offset, updated);
}
bool Code::back_edges_patched_for_osr() {
bool Code::back_edges_patched_for_osr() const {
DCHECK_EQ(FUNCTION, kind());
return allow_osr_at_loop_nesting_level() > 0;
}
......@@ -4013,8 +3961,7 @@ bool Code::back_edges_patched_for_osr() {
uint16_t Code::to_boolean_state() { return extra_ic_state(); }
bool Code::marked_for_deoptimization() {
bool Code::marked_for_deoptimization() const {
DCHECK(kind() == OPTIMIZED_FUNCTION);
return MarkedForDeoptimizationField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
......@@ -4029,7 +3976,7 @@ void Code::set_marked_for_deoptimization(bool flag) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
bool Code::deopt_already_counted() {
bool Code::deopt_already_counted() const {
DCHECK(kind() == OPTIMIZED_FUNCTION);
return DeoptAlreadyCountedField::decode(
READ_UINT32_FIELD(this, kKindSpecificFlags1Offset));
......@@ -4043,7 +3990,7 @@ void Code::set_deopt_already_counted(bool flag) {
WRITE_UINT32_FIELD(this, kKindSpecificFlags1Offset, updated);
}
bool Code::is_inline_cache_stub() {
bool Code::is_inline_cache_stub() const {
Kind kind = this->kind();
switch (kind) {
#define CASE(name) case name: return true;
......@@ -4053,7 +4000,7 @@ bool Code::is_inline_cache_stub() {
}
}
bool Code::is_debug_stub() {
bool Code::is_debug_stub() const {
if (kind() != BUILTIN) return false;
switch (builtin_index()) {
#define CASE_DEBUG_BUILTIN(name) case Builtins::k##name:
......@@ -4065,11 +4012,11 @@ bool Code::is_debug_stub() {
}
return false;
}
bool Code::is_handler() { return kind() == HANDLER; }
bool Code::is_stub() { return kind() == STUB; }
bool Code::is_compare_ic_stub() { return kind() == COMPARE_IC; }
bool Code::is_optimized_code() { return kind() == OPTIMIZED_FUNCTION; }
bool Code::is_wasm_code() { return kind() == WASM_FUNCTION; }
// Code-kind predicates: each compares kind() against a single Kind value.
bool Code::is_handler() const { return kind() == HANDLER; }
bool Code::is_stub() const { return kind() == STUB; }
bool Code::is_compare_ic_stub() const { return kind() == COMPARE_IC; }
bool Code::is_optimized_code() const { return kind() == OPTIMIZED_FUNCTION; }
bool Code::is_wasm_code() const { return kind() == WASM_FUNCTION; }
Address Code::constant_pool() {
Address constant_pool = NULL;
......@@ -4319,8 +4266,7 @@ uint32_t Map::bit_field3() const {
return READ_UINT32_FIELD(this, kBitField3Offset);
}
LayoutDescriptor* Map::GetLayoutDescriptor() {
// Returns this map's layout descriptor when double-field unboxing is enabled
// (FLAG_unbox_double_fields); otherwise the shared all-tagged
// FastPointerLayout singleton.
LayoutDescriptor* Map::GetLayoutDescriptor() const {
return FLAG_unbox_double_fields ? layout_descriptor()
: LayoutDescriptor::FastPointerLayout();
}
......@@ -4341,8 +4287,7 @@ void Map::AppendDescriptor(Descriptor* desc) {
#endif
}
Object* Map::GetBackPointer() {
Object* Map::GetBackPointer() const {
Object* object = constructor_or_backpointer();
if (object->IsMap()) {
return object;
......@@ -4350,8 +4295,7 @@ Object* Map::GetBackPointer() {
return GetIsolate()->heap()->undefined_value();
}
Map* Map::ElementsTransitionMap() {
// Looks up the map reached from this one via the special elements-kind
// transition, keyed by the heap's elements_transition_symbol.
Map* Map::ElementsTransitionMap() const {
return TransitionArray::SearchSpecial(
this, GetHeap()->elements_transition_symbol());
}
......@@ -4751,8 +4695,7 @@ void JSFunction::CompleteInobjectSlackTrackingIfActive() {
}
}
bool Map::IsInobjectSlackTrackingInProgress() {
// In-object slack tracking is active while the construction counter has not
// yet reached the kNoSlackTracking sentinel.
bool Map::IsInobjectSlackTrackingInProgress() const {
return construction_counter() != Map::kNoSlackTracking;
}
......@@ -5065,8 +5008,7 @@ void Code::WipeOutHeader() {
WRITE_FIELD(this, kNextCodeLinkOffset, NULL);
}
Object* Code::type_feedback_info() {
// Type feedback info, valid only for FUNCTION code; other kinds reuse the
// raw_type_feedback_info slot for different data (e.g. stub keys).
Object* Code::type_feedback_info() const {
DCHECK(kind() == FUNCTION);
return raw_type_feedback_info();
}
......@@ -5079,7 +5021,7 @@ void Code::set_type_feedback_info(Object* value, WriteBarrierMode mode) {
value, mode);
}
ByteArray* Code::SourcePositionTable() {
ByteArray* Code::SourcePositionTable() const {
Object* maybe_table = source_position_table();
if (maybe_table->IsByteArray()) return ByteArray::cast(maybe_table);
DCHECK(maybe_table->IsSourcePositionTableWithFrameCache());
......@@ -5087,7 +5029,7 @@ ByteArray* Code::SourcePositionTable() {
->source_position_table();
}
uint32_t Code::stub_key() {
uint32_t Code::stub_key() const {
DCHECK(IsCodeStubOrIC());
Smi* smi_key = Smi::cast(raw_type_feedback_info());
return static_cast<uint32_t>(smi_key->value());
......@@ -5099,13 +5041,11 @@ void Code::set_stub_key(uint32_t key) {
set_raw_type_feedback_info(Smi::FromInt(key));
}
byte* Code::instruction_start() {
return FIELD_ADDR(this, kHeaderSize);
// Address of the first instruction: the code body starts immediately after
// the fixed header. const_cast is needed because FIELD_ADDR_CONST yields a
// pointer into a const object while callers expect a mutable byte*.
byte* Code::instruction_start() const {
return const_cast<byte*>(FIELD_ADDR_CONST(this, kHeaderSize));
}
byte* Code::instruction_end() {
// One past the last instruction byte.
byte* Code::instruction_end() const {
return instruction_start() + instruction_size();
}
......@@ -5125,17 +5065,19 @@ void Code::set_unwinding_info_size(int value) {
WRITE_UINT64_FIELD(this, GetUnwindingInfoSizeOffset(), value);
}
byte* Code::unwinding_info_start() {
byte* Code::unwinding_info_start() const {
DCHECK(has_unwinding_info());
return FIELD_ADDR(this, GetUnwindingInfoSizeOffset()) + kInt64Size;
return const_cast<byte*>(
FIELD_ADDR_CONST(this, GetUnwindingInfoSizeOffset())) +
kInt64Size;
}
byte* Code::unwinding_info_end() {
// One past the end of the unwinding information; only meaningful when
// unwinding info is present.
byte* Code::unwinding_info_end() const {
DCHECK(has_unwinding_info());
return unwinding_info_start() + unwinding_info_size();
}
int Code::body_size() {
int Code::body_size() const {
int unpadded_body_size =
has_unwinding_info()
? static_cast<int>(unwinding_info_end() - instruction_start())
......@@ -5143,7 +5085,7 @@ int Code::body_size() {
return RoundUp(unpadded_body_size, kObjectAlignment);
}
int Code::SizeIncludingMetadata() {
int Code::SizeIncludingMetadata() const {
int size = CodeSize();
size += relocation_info()->Size();
size += deoptimization_data()->Size();
......@@ -5154,41 +5096,32 @@ int Code::SizeIncludingMetadata() {
return size;
}
ByteArray* Code::unchecked_relocation_info() {
// Unchecked accessor for the relocation info ByteArray: reads the field
// directly with no type check, so it is usable during GC while maps may be
// invalid.
ByteArray* Code::unchecked_relocation_info() const {
return reinterpret_cast<ByteArray*>(READ_FIELD(this, kRelocationInfoOffset));
}
byte* Code::relocation_start() {
// Start of the relocation info data (note: reloc info is read backwards).
byte* Code::relocation_start() const {
return unchecked_relocation_info()->GetDataStartAddress();
}
int Code::relocation_size() {
// Size in bytes of the relocation info.
int Code::relocation_size() const {
return unchecked_relocation_info()->length();
}
byte* Code::entry() {
return instruction_start();
}
// Code entry point: identical to the address of the first instruction.
byte* Code::entry() const { return instruction_start(); }
// True if |inner_pointer| lies within this object's extent
// [address(), address() + Size()] (upper bound inclusive, as written).
bool Code::contains(byte* inner_pointer) {
return (address() <= inner_pointer) && (inner_pointer <= address() + Size());
}
int Code::ExecutableSize() {
// Size to report for log events: header plus instructions. The DCHECK pins
// the layout assumption that instructions start right after the header.
int Code::ExecutableSize() const {
// Check that the assumptions about the layout of the code object holds.
DCHECK_EQ(static_cast<int>(instruction_start() - address()),
Code::kHeaderSize);
return instruction_size() + Code::kHeaderSize;
}
int Code::CodeSize() { return SizeFor(body_size()); }
// Total allocated size of this Code object, derived from the padded body.
int Code::CodeSize() const { return SizeFor(body_size()); }
ACCESSORS(JSArray, length, Object, kLengthOffset)
......
......@@ -2144,7 +2144,7 @@ Maybe<bool> JSReceiver::SetOrCopyDataProperties(
return Just(true);
}
Map* Object::GetPrototypeChainRootMap(Isolate* isolate) {
Map* Object::GetPrototypeChainRootMap(Isolate* isolate) const {
DisallowHeapAllocation no_alloc;
if (IsSmi()) {
Context* native_context = isolate->context()->native_context();
......@@ -2153,14 +2153,14 @@ Map* Object::GetPrototypeChainRootMap(Isolate* isolate) {
// The object is either a number, a string, a symbol, a boolean, a real JS
// object, or a Harmony proxy.
HeapObject* heap_object = HeapObject::cast(this);
const HeapObject* heap_object = HeapObject::cast(this);
return heap_object->map()->GetPrototypeChainRootMap(isolate);
}
Map* Map::GetPrototypeChainRootMap(Isolate* isolate) {
Map* Map::GetPrototypeChainRootMap(Isolate* isolate) const {
DisallowHeapAllocation no_alloc;
if (IsJSReceiverMap()) {
return this;
return const_cast<Map*>(this);
}
int constructor_function_index = GetConstructorFunctionIndex();
if (constructor_function_index != Map::kNoConstructorFunctionIndex) {
......@@ -3458,11 +3458,13 @@ Handle<Context> JSReceiver::GetCreationContext() {
: Handle<Context>::null();
}
// static
// Wraps a field type for storage: class field types are stored as a weak
// cell for the class's map (presumably so the referenced map stays
// collectable -- TODO confirm); all other field types are stored as-is.
Handle<Object> Map::WrapFieldType(Handle<FieldType> type) {
if (type->IsClass()) return Map::WeakCellForMap(type->AsClass());
return type;
}
// static
FieldType* Map::UnwrapFieldType(Object* wrapped_type) {
Object* value = wrapped_type;
if (value->IsWeakCell()) {
......@@ -3550,7 +3552,7 @@ const char* Representation::Mnemonic() const {
}
}
bool Map::TransitionRemovesTaggedField(Map* target) {
bool Map::TransitionRemovesTaggedField(Map* target) const {
int inobject = NumberOfFields();
int target_inobject = target->NumberOfFields();
for (int i = target_inobject; i < inobject; i++) {
......@@ -3560,7 +3562,7 @@ bool Map::TransitionRemovesTaggedField(Map* target) {
return false;
}
bool Map::TransitionChangesTaggedFieldToUntaggedField(Map* target) {
bool Map::TransitionChangesTaggedFieldToUntaggedField(Map* target) const {
int inobject = NumberOfFields();
int target_inobject = target->NumberOfFields();
int limit = Min(inobject, target_inobject);
......@@ -3573,12 +3575,12 @@ bool Map::TransitionChangesTaggedFieldToUntaggedField(Map* target) {
return false;
}
bool Map::TransitionRequiresSynchronizationWithGC(Map* target) {
// A map transition needs GC synchronization when it either removes a tagged
// field or turns a tagged field into an untagged one.
bool Map::TransitionRequiresSynchronizationWithGC(Map* target) const {
return TransitionRemovesTaggedField(target) ||
TransitionChangesTaggedFieldToUntaggedField(target);
}
bool Map::InstancesNeedRewriting(Map* target) {
bool Map::InstancesNeedRewriting(Map* target) const {
int target_number_of_fields = target->NumberOfFields();
int target_inobject = target->GetInObjectProperties();
int target_unused = target->unused_property_fields();
......@@ -3591,7 +3593,7 @@ bool Map::InstancesNeedRewriting(Map* target) {
bool Map::InstancesNeedRewriting(Map* target, int target_number_of_fields,
int target_inobject, int target_unused,
int* old_number_of_fields) {
int* old_number_of_fields) const {
// If fields were added (or removed), rewrite the instance.
*old_number_of_fields = NumberOfFields();
DCHECK(target_number_of_fields >= *old_number_of_fields);
......@@ -4051,7 +4053,7 @@ void JSObject::ForceSetPrototype(Handle<JSObject> object,
JSObject::MigrateToMap(object, new_map);
}
int Map::NumberOfFields() {
int Map::NumberOfFields() const {
DescriptorArray* descriptors = instance_descriptors();
int result = 0;
for (int i = 0; i < NumberOfOwnDescriptors(); i++) {
......@@ -4175,9 +4177,8 @@ void Map::ReplaceDescriptors(DescriptorArray* new_descriptors,
set_owns_descriptors(false);
}
Map* Map::FindRootMap() {
Map* result = this;
Map* Map::FindRootMap() const {
const Map* result = this;
Isolate* isolate = GetIsolate();
while (true) {
Object* back = result->GetBackPointer();
......@@ -4187,26 +4188,25 @@ Map* Map::FindRootMap() {
DCHECK(result->owns_descriptors());
DCHECK_EQ(result->NumberOfOwnDescriptors(),
result->instance_descriptors()->number_of_descriptors());
return result;
return const_cast<Map*>(result);
}
result = Map::cast(back);
}
}
Map* Map::FindFieldOwner(int descriptor) {
Map* Map::FindFieldOwner(int descriptor) const {
DisallowHeapAllocation no_allocation;
DCHECK_EQ(kField, instance_descriptors()->GetDetails(descriptor).location());
Map* result = this;
const Map* result = this;
Isolate* isolate = GetIsolate();
while (true) {
Object* back = result->GetBackPointer();
if (back->IsUndefined(isolate)) break;
Map* parent = Map::cast(back);
const Map* parent = Map::cast(back);
if (parent->NumberOfOwnDescriptors() <= descriptor) break;
result = parent;
}
return result;
return const_cast<Map*>(result);
}
void Map::UpdateFieldType(int descriptor, Handle<Name> name,
......@@ -5112,8 +5112,7 @@ Map* Map::LookupElementsTransitionMap(ElementsKind to_kind) {
return nullptr;
}
bool Map::IsMapInArrayPrototypeChain() {
bool Map::IsMapInArrayPrototypeChain() const {
Isolate* isolate = GetIsolate();
if (isolate->initial_array_prototype()->map() == this) {
return true;
......@@ -8278,7 +8277,7 @@ bool JSObject::HasEnumerableElements() {
UNREACHABLE();
}
int Map::NumberOfEnumerableProperties() {
int Map::NumberOfEnumerableProperties() const {
int result = 0;
DescriptorArray* descs = instance_descriptors();
int limit = NumberOfOwnDescriptors();
......@@ -8291,8 +8290,7 @@ int Map::NumberOfEnumerableProperties() {
return result;
}
int Map::NextFreePropertyIndex() {
int Map::NextFreePropertyIndex() const {
int free_index = 0;
int number_of_own_descriptors = NumberOfOwnDescriptors();
DescriptorArray* descs = instance_descriptors();
......@@ -8306,8 +8304,7 @@ int Map::NextFreePropertyIndex() {
return free_index;
}
bool Map::OnlyHasSimpleProperties() {
bool Map::OnlyHasSimpleProperties() const {
// Wrapped string elements aren't explicitly stored in the elements backing
// store, but are loaded indirectly from the underlying string.
return !IsStringWrapperElementsKind(elements_kind()) &&
......@@ -11976,7 +11973,7 @@ int Map::Hash() {
namespace {
bool CheckEquivalent(Map* first, Map* second) {
bool CheckEquivalent(const Map* first, const Map* second) {
return first->GetConstructor() == second->GetConstructor() &&
first->prototype() == second->prototype() &&
first->instance_type() == second->instance_type() &&
......@@ -11988,8 +11985,7 @@ bool CheckEquivalent(Map* first, Map* second) {
} // namespace
bool Map::EquivalentToForTransition(Map* other) {
bool Map::EquivalentToForTransition(const Map* other) const {
if (!CheckEquivalent(this, other)) return false;
if (instance_type() == JS_FUNCTION_TYPE) {
// JSFunctions require more checks to ensure that sloppy function is
......@@ -12001,9 +11997,8 @@ bool Map::EquivalentToForTransition(Map* other) {
return true;
}
bool Map::EquivalentToForNormalization(Map* other,
PropertyNormalizationMode mode) {
bool Map::EquivalentToForNormalization(const Map* other,
PropertyNormalizationMode mode) const {
int properties =
mode == CLEAR_INOBJECT_PROPERTIES ? 0 : other->GetInObjectProperties();
return CheckEquivalent(this, other) && bit_field2() == other->bit_field2() &&
......
......@@ -1193,7 +1193,7 @@ class Object {
INLINE(bool IsNaN() const);
INLINE(bool IsMinusZero() const);
V8_EXPORT_PRIVATE bool ToInt32(int32_t* value);
inline bool ToUint32(uint32_t* value);
inline bool ToUint32(uint32_t* value) const;
inline Representation OptimalRepresentation();
......@@ -1472,12 +1472,12 @@ class Object {
// Tries to convert an object to an array length. Returns true and sets the
// output parameter if it succeeds.
inline bool ToArrayLength(uint32_t* index);
inline bool ToArrayLength(uint32_t* index) const;
// Tries to convert an object to an array index. Returns true and sets the
// output parameter if it succeeds. Equivalent to ToArrayLength, but does not
// allow kMaxUInt32.
inline bool ToArrayIndex(uint32_t* index);
inline bool ToArrayIndex(uint32_t* index) const;
// Returns true if the result of iterating over the object is the same
// (including observable effects) as simply accessing the properties between 0
......@@ -1521,7 +1521,7 @@ class Object {
friend class StringStream;
// Return the map of the root of object's prototype chain.
Map* GetPrototypeChainRootMap(Isolate* isolate);
Map* GetPrototypeChainRootMap(Isolate* isolate) const;
// Helper for SetProperty and SetSuperProperty.
// Return value is only meaningful if [found] is set to true on return.
......@@ -1636,8 +1636,7 @@ class MapWord BASE_EMBEDDED {
static inline MapWord FromMap(const Map* map);
// View this map word as a map pointer.
inline Map* ToMap();
inline Map* ToMap() const;
// Scavenge collection: the map word of live objects in the from space
// contains a forwarding address (a heap object pointer in the to space).
......@@ -1688,7 +1687,7 @@ class HeapObject: public Object {
inline void set_map_no_write_barrier(Map* value);
// Get the map using acquire load.
inline Map* synchronized_map();
inline Map* synchronized_map() const;
inline MapWord synchronized_map_word() const;
// Set the map using release store
......@@ -1733,8 +1732,9 @@ class HeapObject: public Object {
}
// Returns the address of this HeapObject.
inline Address address() {
return reinterpret_cast<Address>(this) - kHeapObjectTag;
inline Address address() const {
return reinterpret_cast<Address>(const_cast<HeapObject*>(this)) -
kHeapObjectTag;
}
// Iterates over pointers contained in the object (including the Map).
......@@ -1767,12 +1767,12 @@ class HeapObject: public Object {
bool IsValidSlot(int offset);
// Returns the heap object's size in bytes
inline int Size();
inline int Size() const;
// Given a heap object's map pointer, returns the heap size in bytes
// Useful when the map pointer field is used for other purposes.
// GC internal.
inline int SizeFromMap(Map* map);
inline int SizeFromMap(Map* map) const;
// Returns the field at offset in obj, as a read/write Object* reference.
// Does no checking, and is safe to use during GC, while maps are invalid.
......@@ -1813,7 +1813,7 @@ class HeapObject: public Object {
static void VerifyHeapPointer(Object* p);
#endif
inline AllocationAlignment RequiredAlignment();
inline AllocationAlignment RequiredAlignment() const;
// Layout description.
// First field in a heap object is map.
......@@ -2403,8 +2403,8 @@ class JSObject: public JSReceiver {
static inline int GetHeaderSize(InstanceType instance_type);
inline int GetHeaderSize();
static inline int GetEmbedderFieldCount(Map* map);
inline int GetEmbedderFieldCount();
static inline int GetEmbedderFieldCount(const Map* map);
inline int GetEmbedderFieldCount() const;
inline int GetEmbedderFieldOffset(int index);
inline Object* GetEmbedderField(int index);
inline void SetEmbedderField(int index, Object* value);
......@@ -3381,20 +3381,20 @@ class FixedTypedArrayBase: public FixedArrayBase {
// No weak fields.
typedef BodyDescriptor BodyDescriptorWeak;
inline int size();
inline int size() const;
static inline int TypedArraySize(InstanceType type, int length);
inline int TypedArraySize(InstanceType type);
inline int TypedArraySize(InstanceType type) const;
// Use with care: returns raw pointer into heap.
inline void* DataPtr();
inline int DataSize();
inline int DataSize() const;
private:
static inline int ElementSize(InstanceType type);
inline int DataSize(InstanceType type);
inline int DataSize(InstanceType type) const;
DISALLOW_IMPLICIT_CONSTRUCTORS(FixedTypedArrayBase);
};
......@@ -3622,7 +3622,7 @@ class Code: public HeapObject {
// SourcePositionTableWithFrameCache.
DECL_ACCESSORS(source_position_table, Object)
inline ByteArray* SourcePositionTable();
inline ByteArray* SourcePositionTable() const;
// [trap_handler_index]: An index into the trap handler's master list of code
// objects.
......@@ -3633,10 +3633,10 @@ class Code: public HeapObject {
// FUNCTION => type feedback information.
// STUB and ICs => major/minor key as Smi.
DECL_ACCESSORS(raw_type_feedback_info, Object)
inline Object* type_feedback_info();
inline Object* type_feedback_info() const;
inline void set_type_feedback_info(
Object* value, WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
inline uint32_t stub_key();
inline uint32_t stub_key() const;
inline void set_stub_key(uint32_t key);
// [next_code_link]: Link for lists of optimized or deoptimized code.
......@@ -3654,76 +3654,76 @@ class Code: public HeapObject {
inline void set_constant_pool_offset(int offset);
// Unchecked accessors to be used during GC.
inline ByteArray* unchecked_relocation_info();
inline ByteArray* unchecked_relocation_info() const;
inline int relocation_size();
inline int relocation_size() const;
// [flags]: Various code flags.
inline Flags flags();
inline Flags flags() const;
inline void set_flags(Flags flags);
// [flags]: Access to specific code flags.
inline Kind kind();
inline ExtraICState extra_ic_state(); // Only valid for IC stubs.
inline Kind kind() const;
inline ExtraICState extra_ic_state() const; // Only valid for IC stubs.
// Testers for IC stub kinds.
inline bool is_inline_cache_stub();
inline bool is_debug_stub();
inline bool is_handler();
inline bool is_stub();
inline bool is_compare_ic_stub();
inline bool is_optimized_code();
inline bool is_wasm_code();
inline bool is_inline_cache_stub() const;
inline bool is_debug_stub() const;
inline bool is_handler() const;
inline bool is_stub() const;
inline bool is_compare_ic_stub() const;
inline bool is_optimized_code() const;
inline bool is_wasm_code() const;
inline bool IsCodeStubOrIC();
inline bool IsCodeStubOrIC() const;
inline void set_raw_kind_specific_flags1(int value);
inline void set_raw_kind_specific_flags2(int value);
// Testers for interpreter builtins.
inline bool is_interpreter_trampoline_builtin();
inline bool is_interpreter_trampoline_builtin() const;
// Tells whether the code checks the optimization marker in the function's
// feedback vector.
inline bool checks_optimization_marker();
inline bool checks_optimization_marker() const;
// [is_crankshafted]: For kind STUB or ICs, tells whether or not a code
// object was generated by either the hydrogen or the TurboFan optimizing
// compiler (but it may not be an optimized function).
inline bool is_crankshafted();
inline bool is_hydrogen_stub(); // Crankshafted, but not a function.
inline bool is_crankshafted() const;
inline bool is_hydrogen_stub() const; // Crankshafted, but not a function.
inline void set_is_crankshafted(bool value);
// [has_tagged_params]: For compiled code or builtins: Tells whether the
// outgoing parameters of this code are tagged pointers. True for other kinds.
inline bool has_tagged_params();
inline bool has_tagged_params() const;
inline void set_has_tagged_params(bool value);
// [is_turbofanned]: For kind STUB or OPTIMIZED_FUNCTION, tells whether the
// code object was generated by the TurboFan optimizing compiler.
inline bool is_turbofanned();
inline bool is_turbofanned() const;
inline void set_is_turbofanned(bool value);
// [can_have_weak_objects]: For kind OPTIMIZED_FUNCTION, tells whether the
// embedded objects in code should be treated weakly.
inline bool can_have_weak_objects();
inline bool can_have_weak_objects() const;
inline void set_can_have_weak_objects(bool value);
// [is_construct_stub]: For kind BUILTIN, tells whether the code object
// represents a hand-written construct stub
// (e.g., NumberConstructor_ConstructStub).
inline bool is_construct_stub();
inline bool is_construct_stub() const;
inline void set_is_construct_stub(bool value);
// [has_debug_break_slots]: For FUNCTION kind, tells if it has
// been compiled with debug break slots.
inline bool has_debug_break_slots();
inline bool has_debug_break_slots() const;
inline void set_has_debug_break_slots(bool value);
// [has_reloc_info_for_serialization]: For FUNCTION kind, tells if its
// reloc info includes runtime and external references to support
// serialization/deserialization.
inline bool has_reloc_info_for_serialization();
inline bool has_reloc_info_for_serialization() const;
inline void set_has_reloc_info_for_serialization(bool value);
// [allow_osr_at_loop_nesting_level]: For FUNCTION kind, tells for
......@@ -3731,42 +3731,42 @@ class Code: public HeapObject {
// level of loop nesting we are willing to do on-stack replacement
// for.
inline void set_allow_osr_at_loop_nesting_level(int level);
inline int allow_osr_at_loop_nesting_level();
inline int allow_osr_at_loop_nesting_level() const;
// [builtin_index]: For builtins, tells which builtin index the code object
// has. Note that builtins can have a code kind other than BUILTIN. The
// builtin index is a non-negative integer for builtins, and -1 otherwise.
inline int builtin_index();
inline int builtin_index() const;
inline void set_builtin_index(int id);
// [stack_slots]: For kind OPTIMIZED_FUNCTION, the number of stack slots
// reserved in the code prologue.
inline unsigned stack_slots();
inline unsigned stack_slots() const;
inline void set_stack_slots(unsigned slots);
// [safepoint_table_start]: For kind OPTIMIZED_FUNCTION, the offset in
// the instruction stream where the safepoint table starts.
inline unsigned safepoint_table_offset();
inline unsigned safepoint_table_offset() const;
inline void set_safepoint_table_offset(unsigned offset);
// [back_edge_table_start]: For kind FUNCTION, the offset in the
// instruction stream where the back edge table starts.
inline unsigned back_edge_table_offset();
inline unsigned back_edge_table_offset() const;
inline void set_back_edge_table_offset(unsigned offset);
inline bool back_edges_patched_for_osr();
inline bool back_edges_patched_for_osr() const;
// [to_boolean_foo]: For kind TO_BOOLEAN_IC tells what state the stub is in.
inline uint16_t to_boolean_state();
// [marked_for_deoptimization]: For kind OPTIMIZED_FUNCTION tells whether
// the code is going to be deoptimized because of dead embedded maps.
inline bool marked_for_deoptimization();
inline bool marked_for_deoptimization() const;
inline void set_marked_for_deoptimization(bool flag);
// [deopt_already_counted]: For kind OPTIMIZED_FUNCTION tells whether
// the code was already deoptimized.
inline bool deopt_already_counted();
inline bool deopt_already_counted() const;
inline void set_deopt_already_counted(bool flag);
// [is_promise_rejection]: For kind BUILTIN tells whether the
......@@ -3832,21 +3832,21 @@ class Code: public HeapObject {
static inline Object* GetObjectFromCodeEntry(Address code_entry);
// Returns the address of the first instruction.
inline byte* instruction_start();
inline byte* instruction_start() const;
// Returns the address right after the last instruction.
inline byte* instruction_end();
inline byte* instruction_end() const;
// Returns the size of the instructions, padding, relocation and unwinding
// information.
inline int body_size();
inline int body_size() const;
// Returns the size of code and its metadata. This includes the size of code
// relocation information, deoptimization data and handler table.
inline int SizeIncludingMetadata();
inline int SizeIncludingMetadata() const;
// Returns the address of the first relocation info (read backwards!).
inline byte* relocation_start();
inline byte* relocation_start() const;
// [has_unwinding_info]: Whether this code object has unwinding information.
// If it doesn't, unwinding_information_start() will point to invalid data.
......@@ -3886,13 +3886,13 @@ class Code: public HeapObject {
inline void set_unwinding_info_size(int value);
// Returns the address of the unwinding information, if any.
inline byte* unwinding_info_start();
inline byte* unwinding_info_start() const;
// Returns the address right after the end of the unwinding information.
inline byte* unwinding_info_end();
inline byte* unwinding_info_end() const;
// Code entry point.
inline byte* entry();
inline byte* entry() const;
// Returns true if pc is inside this object's instructions.
inline bool contains(byte* pc);
......@@ -3912,12 +3912,12 @@ class Code: public HeapObject {
// Calculate the size of the code object to report for log events. This takes
// the layout of the code object into account.
inline int ExecutableSize();
inline int ExecutableSize() const;
DECL_CAST(Code)
// Dispatched behavior.
inline int CodeSize();
inline int CodeSize() const;
DECL_PRINTER(Code)
DECL_VERIFIER(Code)
......@@ -4111,8 +4111,8 @@ class Code: public HeapObject {
// Code aging -- platform-specific
static void PatchPlatformCodeAge(Isolate* isolate, byte* sequence, Age age);
bool is_promise_rejection();
bool is_exception_caught();
bool is_promise_rejection() const;
bool is_exception_caught() const;
DISALLOW_IMPLICIT_CONSTRUCTORS(Code);
};
......
......@@ -68,13 +68,13 @@ class BaseShape {
class V8_EXPORT_PRIVATE HashTableBase : public NON_EXPORTED_BASE(FixedArray) {
public:
// Returns the number of elements in the hash table.
inline int NumberOfElements();
inline int NumberOfElements() const;
// Returns the number of deleted elements in the hash table.
inline int NumberOfDeletedElements();
inline int NumberOfDeletedElements() const;
// Returns the capacity of the hash table.
inline int Capacity();
inline int Capacity() const;
// ElementAdded should be called whenever an element is added to a
// hash table.
......@@ -678,7 +678,9 @@ class SmallOrderedHashTable : public HeapObject {
return ((padding_offset + kPointerSize - 1) / kPointerSize) * kPointerSize;
}
int GetDataTableStartOffset() { return GetDataTableStartOffset(Capacity()); }
// Byte offset where the data table starts, for the table's current capacity.
int GetDataTableStartOffset() const {
return GetDataTableStartOffset(Capacity());
}
static int Size(int capacity) {
int data_table_start = GetDataTableStartOffset(capacity);
......@@ -686,39 +688,45 @@ class SmallOrderedHashTable : public HeapObject {
return data_table_start + data_table_size;
}
int Size() { return Size(Capacity()); }
// Total size in bytes of this table at its current capacity.
int Size() const { return Size(Capacity()); }
// Records |value| as the first entry of |bucket| in the bucket array.
void SetFirstEntry(int bucket, byte value) {
set(kBucketsStartOffset + bucket, value);
}
int GetFirstEntry(int bucket) { return get(kBucketsStartOffset + bucket); }
// Returns the first entry chained from |bucket|.
int GetFirstEntry(int bucket) const {
return get(kBucketsStartOffset + bucket);
}
// Links |entry| to |next_entry| in the chain table.
void SetNextEntry(int entry, int next_entry) {
set(GetChainTableOffset() + entry, next_entry);
}
int GetNextEntry(int entry) { return get(GetChainTableOffset() + entry); }
// Returns the entry chained after |entry|.
int GetNextEntry(int entry) const {
return get(GetChainTableOffset() + entry);
}
// Reads the Object* stored at |relative_index| within |entry|'s data record.
Object* GetDataEntry(int entry, int relative_index) {
int entry_offset = GetDataEntryOffset(entry, relative_index);
return READ_FIELD(this, entry_offset);
}
Object* KeyAt(int entry) {
// Reads |entry|'s key, i.e. the data field at Derived::kKeyIndex.
Object* KeyAt(int entry) const {
int entry_offset = GetDataEntryOffset(entry, Derived::kKeyIndex);
return READ_FIELD(this, entry_offset);
}
int HashToBucket(int hash) { return hash & (NumberOfBuckets() - 1); }
// Maps a hash to a bucket index via masking (assumes NumberOfBuckets() is a
// power of two -- TODO confirm).
int HashToBucket(int hash) const { return hash & (NumberOfBuckets() - 1); }
int HashToFirstEntry(int hash) {
// Returns the first entry of the bucket that |hash| maps to.
int HashToFirstEntry(int hash) const {
int bucket = HashToBucket(hash);
int entry = GetFirstEntry(bucket);
return entry;
}
int GetChainTableOffset() { return kBucketsStartOffset + NumberOfBuckets(); }
// The chain table immediately follows the per-bucket first-entry array.
int GetChainTableOffset() const {
return kBucketsStartOffset + NumberOfBuckets();
}
// Stores the bucket count in the header.
void SetNumberOfBuckets(int num) { set(kNumberOfBucketsOffset, num); }
......@@ -728,11 +736,13 @@ class SmallOrderedHashTable : public HeapObject {
set(kNumberOfDeletedElementsOffset, num);
}
int NumberOfElements() { return get(kNumberOfElementsOffset); }
// Number of live elements currently in the table.
int NumberOfElements() const { return get(kNumberOfElementsOffset); }
int NumberOfDeletedElements() { return get(kNumberOfDeletedElementsOffset); }
// Number of deleted entries recorded in the header.
int NumberOfDeletedElements() const {
return get(kNumberOfDeletedElementsOffset);
}
int NumberOfBuckets() { return get(kNumberOfBucketsOffset); }
// Number of hash buckets.
int NumberOfBuckets() const { return get(kNumberOfBucketsOffset); }
static const byte kNotFound = 0xFF;
static const int kMinCapacity = 4;
......@@ -768,7 +778,7 @@ class SmallOrderedHashTable : public HeapObject {
protected:
// This is used for accessing the non |DataTable| part of the
// structure.
byte get(int index) {
// Reads one byte at |index| within the non-data-table header region.
byte get(int index) const {
return READ_BYTE_FIELD(this, kHeaderSize + (index * kOneByteSize));
}
......@@ -776,7 +786,7 @@ class SmallOrderedHashTable : public HeapObject {
WRITE_BYTE_FIELD(this, kHeaderSize + (index * kOneByteSize), value);
}
int GetDataEntryOffset(int entry, int relative_index) {
int GetDataEntryOffset(int entry, int relative_index) const {
int datatable_start = GetDataTableStartOffset();
int offset_in_datatable = entry * Derived::kEntrySize * kPointerSize;
int offset_in_entry = relative_index * kPointerSize;
......@@ -784,9 +794,11 @@ class SmallOrderedHashTable : public HeapObject {
}
// Returns the number elements that can fit into the allocated buffer.
int Capacity() { return NumberOfBuckets() * kLoadFactor; }
// Maximum element count: one load-factor's worth of entries per bucket.
int Capacity() const { return NumberOfBuckets() * kLoadFactor; }
int UsedCapacity() { return NumberOfElements() + NumberOfDeletedElements(); }
// Slots consumed so far: live elements plus deleted entries.
int UsedCapacity() const {
return NumberOfElements() + NumberOfDeletedElements();
}
};
class SmallOrderedHashSet : public SmallOrderedHashTable<SmallOrderedHashSet> {
......
......@@ -79,7 +79,7 @@ class Map : public HeapObject {
// Instance size.
// Size in bytes or kVariableSizeSentinel if instances do not have
// a fixed size.
inline int instance_size();
inline int instance_size() const;
inline void set_instance_size(int value);
// Only to clear an unused byte, remove once byte is used.
......@@ -88,16 +88,16 @@ class Map : public HeapObject {
// [inobject_properties_or_constructor_function_index]: Provides access
// to the inobject properties in case of JSObject maps, or the constructor
// function index in case of primitive maps.
inline int inobject_properties_or_constructor_function_index();
inline int inobject_properties_or_constructor_function_index() const;
inline void set_inobject_properties_or_constructor_function_index(int value);
// Count of properties allocated in the object (JSObject only).
inline int GetInObjectProperties();
inline int GetInObjectProperties() const;
inline void SetInObjectProperties(int value);
// Index of the constructor function in the native context (primitives only),
// or the special sentinel value to indicate that there is no object wrapper
// for the primitive (i.e. in case of null or undefined).
static const int kNoConstructorFunctionIndex = 0;
inline int GetConstructorFunctionIndex();
inline int GetConstructorFunctionIndex() const;
inline void SetConstructorFunctionIndex(int value);
static MaybeHandle<JSFunction> GetConstructorFunction(
Handle<Map> map, Handle<Context> native_context);
......@@ -107,12 +107,12 @@ class Map : public HeapObject {
inline InterceptorInfo* GetIndexedInterceptor();
// Instance type.
inline InstanceType instance_type();
inline InstanceType instance_type() const;
inline void set_instance_type(InstanceType value);
// Tells how many unused property fields are available in the
// instance (only used for JSObject in fast mode).
inline int unused_property_fields();
inline int unused_property_fields() const;
inline void set_unused_property_fields(int value);
// Bit field.
......@@ -195,7 +195,7 @@ class Map : public HeapObject {
// True if the object constructions countdown counter is a range
// [kSlackTrackingCounterEnd, kSlackTrackingCounterStart].
inline bool IsInobjectSlackTrackingInProgress();
inline bool IsInobjectSlackTrackingInProgress() const;
// Does the tracking step.
inline void InobjectSlackTrackingStep();
......@@ -210,7 +210,7 @@ class Map : public HeapObject {
// property will not be used to create instances of the function.
// See ECMA-262, 13.2.2.
inline void set_non_instance_prototype(bool value);
inline bool has_non_instance_prototype();
inline bool has_non_instance_prototype() const;
// Tells whether the instance has a [[Construct]] internal method.
// This property is implemented according to ES6, section 7.2.4.
......@@ -223,11 +223,11 @@ class Map : public HeapObject {
// Records and queries whether the instance has a named interceptor.
inline void set_has_named_interceptor();
inline bool has_named_interceptor();
inline bool has_named_interceptor() const;
// Records and queries whether the instance has an indexed interceptor.
inline void set_has_indexed_interceptor();
inline bool has_indexed_interceptor();
inline bool has_indexed_interceptor() const;
// Tells whether the instance is undetectable.
// An undetectable object is a special class of JSObject: 'typeof' operator
......@@ -236,7 +236,7 @@ class Map : public HeapObject {
// document.all in Firefox & Safari.
// See https://bugzilla.mozilla.org/show_bug.cgi?id=248549.
inline void set_is_undetectable();
inline bool is_undetectable();
inline bool is_undetectable() const;
// Tells whether the instance has a [[Call]] internal method.
// This property is implemented according to ES6, section 7.2.3.
......@@ -244,28 +244,28 @@ class Map : public HeapObject {
inline bool is_callable() const;
inline void set_new_target_is_base(bool value);
inline bool new_target_is_base();
inline bool new_target_is_base() const;
inline void set_is_extensible(bool value);
inline bool is_extensible();
inline bool is_extensible() const;
inline void set_is_prototype_map(bool value);
inline bool is_prototype_map() const;
inline void set_elements_kind(ElementsKind elements_kind);
inline ElementsKind elements_kind();
inline ElementsKind elements_kind() const;
// Tells whether the instance has fast elements that are only Smis.
inline bool has_fast_smi_elements();
inline bool has_fast_smi_elements() const;
// Tells whether the instance has fast elements.
inline bool has_fast_object_elements();
inline bool has_fast_smi_or_object_elements();
inline bool has_fast_double_elements();
inline bool has_fast_elements();
inline bool has_sloppy_arguments_elements();
inline bool has_fast_sloppy_arguments_elements();
inline bool has_fast_string_wrapper_elements();
inline bool has_fixed_typed_array_elements();
inline bool has_dictionary_elements();
inline bool has_fast_object_elements() const;
inline bool has_fast_smi_or_object_elements() const;
inline bool has_fast_double_elements() const;
inline bool has_fast_elements() const;
inline bool has_sloppy_arguments_elements() const;
inline bool has_fast_sloppy_arguments_elements() const;
inline bool has_fast_string_wrapper_elements() const;
inline bool has_fixed_typed_array_elements() const;
inline bool has_dictionary_elements() const;
static bool IsValidElementsTransition(ElementsKind from_kind,
ElementsKind to_kind);
......@@ -274,9 +274,9 @@ class Map : public HeapObject {
// map with DICTIONARY_ELEMENTS was found in the prototype chain.
bool DictionaryElementsInPrototypeChainOnly();
inline Map* ElementsTransitionMap();
inline Map* ElementsTransitionMap() const;
inline FixedArrayBase* GetInitialElements();
inline FixedArrayBase* GetInitialElements() const;
// [raw_transitions]: Provides access to the transitions storage field.
// Don't call set_raw_transitions() directly to overwrite transitions, use
......@@ -305,35 +305,35 @@ class Map : public HeapObject {
static const int kPrototypeChainInvalid = 1;
// Return the map of the root of object's prototype chain.
Map* GetPrototypeChainRootMap(Isolate* isolate);
Map* GetPrototypeChainRootMap(Isolate* isolate) const;
// Returns a WeakCell object containing given prototype. The cell is cached
// in PrototypeInfo which is created lazily.
static Handle<WeakCell> GetOrCreatePrototypeWeakCell(
Handle<JSObject> prototype, Isolate* isolate);
Map* FindRootMap();
Map* FindFieldOwner(int descriptor);
Map* FindRootMap() const;
Map* FindFieldOwner(int descriptor) const;
inline int GetInObjectPropertyOffset(int index);
inline int GetInObjectPropertyOffset(int index) const;
int NumberOfFields();
int NumberOfFields() const;
// Returns true if transition to the given map requires special
// synchronization with the concurrent marker.
bool TransitionRequiresSynchronizationWithGC(Map* target);
bool TransitionRequiresSynchronizationWithGC(Map* target) const;
// Returns true if transition to the given map removes a tagged in-object
// field.
bool TransitionRemovesTaggedField(Map* target);
bool TransitionRemovesTaggedField(Map* target) const;
// Returns true if transition to the given map replaces a tagged in-object
// field with an untagged in-object field.
bool TransitionChangesTaggedFieldToUntaggedField(Map* target);
bool TransitionChangesTaggedFieldToUntaggedField(Map* target) const;
// TODO(ishell): candidate with JSObject::MigrateToMap().
bool InstancesNeedRewriting(Map* target);
bool InstancesNeedRewriting(Map* target) const;
bool InstancesNeedRewriting(Map* target, int target_number_of_fields,
int target_inobject, int target_unused,
int* old_number_of_fields);
int* old_number_of_fields) const;
// TODO(ishell): moveit!
static Handle<Map> GeneralizeAllFields(Handle<Map> map);
MUST_USE_RESULT static Handle<FieldType> GeneralizeFieldType(
......@@ -366,12 +366,12 @@ class Map : public HeapObject {
// A map can never be used for both dictionary mode and fast mode JSObjects.
// False by default and for HeapObjects that are not JSObjects.
inline void set_dictionary_map(bool value);
inline bool is_dictionary_map();
inline bool is_dictionary_map() const;
// Tells whether the instance needs security checks when accessing its
// properties.
inline void set_is_access_check_needed(bool access_check_needed);
inline bool is_access_check_needed();
inline bool is_access_check_needed() const;
// [prototype]: implicit prototype object.
DECL_ACCESSORS(prototype, Object)
......@@ -392,7 +392,7 @@ class Map : public HeapObject {
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
// [back pointer]: points back to the parent map from which a transition
// leads to this map. The field overlaps with the constructor (see above).
inline Object* GetBackPointer();
inline Object* GetBackPointer() const;
inline void SetBackPointer(Object* value,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
......@@ -408,7 +408,7 @@ class Map : public HeapObject {
// |layout descriptor| accessor that is safe to call even when
// FLAG_unbox_double_fields is disabled (in this case Map does not contain
// |layout_descriptor| field at all).
inline LayoutDescriptor* GetLayoutDescriptor();
inline LayoutDescriptor* GetLayoutDescriptor() const;
inline void UpdateDescriptors(DescriptorArray* descriptors,
LayoutDescriptor* layout_descriptor);
......@@ -424,11 +424,11 @@ class Map : public HeapObject {
// [weak cell cache]: cache that stores a weak cell pointing to this map.
DECL_ACCESSORS(weak_cell_cache, Object)
inline PropertyDetails GetLastDescriptorDetails();
inline PropertyDetails GetLastDescriptorDetails() const;
inline int LastAdded();
inline int LastAdded() const;
inline int NumberOfOwnDescriptors();
inline int NumberOfOwnDescriptors() const;
inline void SetNumberOfOwnDescriptors(int number);
inline Cell* RetrieveDescriptorsPointer();
......@@ -436,23 +436,23 @@ class Map : public HeapObject {
// Checks whether all properties are stored either in the map or on the object
// (inobject, properties, or elements backing store), requiring no special
// checks.
bool OnlyHasSimpleProperties();
inline int EnumLength();
bool OnlyHasSimpleProperties() const;
inline int EnumLength() const;
inline void SetEnumLength(int length);
inline bool owns_descriptors();
inline bool owns_descriptors() const;
inline void set_owns_descriptors(bool owns_descriptors);
inline void mark_unstable();
inline bool is_stable();
inline bool is_stable() const;
inline void set_migration_target(bool value);
inline bool is_migration_target();
inline bool is_migration_target() const;
inline void set_immutable_proto(bool value);
inline bool is_immutable_proto();
inline bool is_immutable_proto() const;
inline void set_construction_counter(int value);
inline int construction_counter();
inline int construction_counter() const;
inline void deprecate();
inline bool is_deprecated();
inline bool CanBeDeprecated();
inline bool is_deprecated() const;
inline bool CanBeDeprecated() const;
// Returns a non-deprecated version of the input. If the input was not
// deprecated, it is directly returned. Otherwise, the non-deprecated version
// is found by re-transitioning from the root of the transition tree using the
......@@ -512,7 +512,7 @@ class Map : public HeapObject {
// Maximal number of fast properties. Used to restrict the number of map
// transitions to avoid an explosion in the number of maps for objects used as
// dictionaries.
inline bool TooManyFastProperties(StoreFromKeyed store_mode);
inline bool TooManyFastProperties(StoreFromKeyed store_mode) const;
static Handle<Map> TransitionToDataProperty(Handle<Map> map,
Handle<Name> name,
Handle<Object> value,
......@@ -541,10 +541,10 @@ class Map : public HeapObject {
static Handle<Map> Create(Isolate* isolate, int inobject_properties);
// Returns the next free property index (only valid for FAST MODE).
int NextFreePropertyIndex();
int NextFreePropertyIndex() const;
// Returns the number of enumerable properties.
int NumberOfEnumerableProperties();
int NumberOfEnumerableProperties() const;
DECL_CAST(Map)
......@@ -578,31 +578,31 @@ class Map : public HeapObject {
// found at all.
Map* FindElementsKindTransitionedMap(MapHandles const& candidates);
inline bool CanTransition();
inline bool CanTransition() const;
inline bool IsBooleanMap();
inline bool IsPrimitiveMap();
inline bool IsJSReceiverMap();
inline bool IsJSObjectMap();
inline bool IsJSArrayMap();
inline bool IsJSFunctionMap();
inline bool IsStringMap();
inline bool IsJSProxyMap();
inline bool IsModuleMap();
inline bool IsJSGlobalProxyMap();
inline bool IsJSGlobalObjectMap();
inline bool IsJSTypedArrayMap();
inline bool IsJSDataViewMap();
inline bool IsBooleanMap() const;
inline bool IsPrimitiveMap() const;
inline bool IsJSReceiverMap() const;
inline bool IsJSObjectMap() const;
inline bool IsJSArrayMap() const;
inline bool IsJSFunctionMap() const;
inline bool IsStringMap() const;
inline bool IsJSProxyMap() const;
inline bool IsModuleMap() const;
inline bool IsJSGlobalProxyMap() const;
inline bool IsJSGlobalObjectMap() const;
inline bool IsJSTypedArrayMap() const;
inline bool IsJSDataViewMap() const;
inline bool IsSpecialReceiverMap();
inline bool IsSpecialReceiverMap() const;
inline bool CanOmitMapChecks();
inline bool CanOmitMapChecks() const;
static void AddDependentCode(Handle<Map> map,
DependentCode::DependencyGroup group,
Handle<Code> code);
bool IsMapInArrayPrototypeChain();
bool IsMapInArrayPrototypeChain() const;
static Handle<WeakCell> WeakCellForMap(Handle<Map> map);
......@@ -615,7 +615,7 @@ class Map : public HeapObject {
void VerifyOmittedMapChecks();
#endif
inline int visitor_id();
inline int visitor_id() const;
inline void set_visitor_id(int visitor_id);
static Handle<Map> TransitionToPrototype(Handle<Map> map,
......@@ -728,10 +728,11 @@ class Map : public HeapObject {
// If |mode| is set to CLEAR_INOBJECT_PROPERTIES, |other| is treated as if
// it had exactly zero inobject properties.
// The "shared" flags of both this map and |other| are ignored.
bool EquivalentToForNormalization(Map* other, PropertyNormalizationMode mode);
bool EquivalentToForNormalization(const Map* other,
PropertyNormalizationMode mode) const;
// Returns true if given field is unboxed double.
inline bool IsUnboxedDoubleField(FieldIndex index);
inline bool IsUnboxedDoubleField(FieldIndex index) const;
#if V8_TRACE_MAPS
static void TraceTransition(const char* what, Map* from, Map* to, Name* name);
......@@ -764,7 +765,7 @@ class Map : public HeapObject {
static void ConnectTransition(Handle<Map> parent, Handle<Map> child,
Handle<Name> name, SimpleTransitionFlag flag);
bool EquivalentToForTransition(Map* other);
bool EquivalentToForTransition(const Map* other) const;
static Handle<Map> RawCopy(Handle<Map> map, int instance_size);
static Handle<Map> ShareDescriptor(Handle<Map> map,
Handle<DescriptorArray> descriptors,
......
......@@ -248,7 +248,7 @@
#define RELAXED_READ_BYTE_FIELD(p, offset) \
static_cast<byte>(base::Relaxed_Load( \
reinterpret_cast<base::Atomic8*>(FIELD_ADDR(p, offset))))
reinterpret_cast<const base::Atomic8*>(FIELD_ADDR_CONST(p, offset))))
#define WRITE_BYTE_FIELD(p, offset, value) \
(*reinterpret_cast<byte*>(FIELD_ADDR(p, offset)) = value)
......
......@@ -181,7 +181,7 @@ Map* TransitionArray::SearchTransition(Map* map, PropertyKind kind, Name* name,
// static
Map* TransitionArray::SearchSpecial(Map* map, Symbol* name) {
Map* TransitionArray::SearchSpecial(const Map* map, Symbol* name) {
Object* raw_transitions = map->raw_transitions();
if (IsFullTransitionArray(raw_transitions)) {
TransitionArray* transitions = TransitionArray::cast(raw_transitions);
......
......@@ -51,7 +51,7 @@ class TransitionArray: public FixedArray {
return MaybeHandle<Map>();
}
static Map* SearchSpecial(Map* map, Symbol* name);
static Map* SearchSpecial(const Map* map, Symbol* name);
static Handle<Map> FindTransitionToField(Handle<Map> map, Handle<Name> name);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment