Commit ef07980e authored by Igor Sheludko, committed by Commit Bot

[cleanup] Fix kPointerSize usages in src/compiler/

Bug: v8:8477, v8:8562
Change-Id: I0dab49a03b74abc68600885f4951c5cb727a3d73
Reviewed-on: https://chromium-review.googlesource.com/c/1366736
Reviewed-by: Tobias Tebbi <tebbi@chromium.org>
Reviewed-by: Jaroslav Sevcik <jarin@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/master@{#58364}
parent 1ca0de67
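Note on the rename: kPointerSize historically stood both for "size of a tagged slot in the V8 heap" and "size of a machine pointer". With pointer compression those two diverge, so this cleanup splits usages into kTaggedSize (heap slots) and kSystemPointerSize (machine words). A minimal sketch of the distinction, with illustrative definitions rather than V8's actual globals.h:

```cpp
#include <cstdint>

// Size of a raw machine pointer; always sizeof(void*).
constexpr int kSystemPointerSize = sizeof(void*);
constexpr int kSystemPointerSizeLog2 = kSystemPointerSize == 8 ? 3 : 2;

// Size of a tagged slot in the V8 heap. Equal to kSystemPointerSize today,
// but becomes 4 on 64-bit targets once pointer compression ships.
constexpr int kTaggedSize = kSystemPointerSize;
constexpr int kTaggedSizeLog2 = kSystemPointerSizeLog2;

static_assert(kTaggedSize <= kSystemPointerSize,
              "a tagged slot never exceeds a machine word");
```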
@@ -414,7 +414,7 @@ FieldAccess AccessBuilder::ForJSDateValue() {
 // static
 FieldAccess AccessBuilder::ForJSDateField(JSDate::FieldIndex index) {
   FieldAccess access = {
-      kTaggedBase, JSDate::kValueOffset + index * kPointerSize,
+      kTaggedBase, JSDate::kValueOffset + index * kTaggedSize,
       MaybeHandle<Name>(), MaybeHandle<Map>(),
       Type::Number(), MachineType::AnyTagged(),
       kFullWriteBarrier};
@@ -1103,7 +1103,7 @@ FieldAccess AccessBuilder::ForOrderedHashMapOrSetNumberOfElements() {
 ElementAccess AccessBuilder::ForOrderedHashMapEntryValue() {
   ElementAccess const access = {kTaggedBase,
                                 OrderedHashMap::HashTableStartOffset() +
-                                    OrderedHashMap::kValueOffset * kPointerSize,
+                                    OrderedHashMap::kValueOffset * kTaggedSize,
                                 Type::Any(), MachineType::AnyTagged(),
                                 kFullWriteBarrier};
   return access;
@@ -41,7 +41,8 @@ static NodeVector::iterator FindInsertionPoint(BasicBlock* block) {
 // TODO(dcarney): need to mark code as non-serializable.
 static const Operator* PointerConstant(CommonOperatorBuilder* common,
                                        intptr_t ptr) {
-  return kPointerSize == 8 ? common->Int64Constant(ptr)
+  return kSystemPointerSize == 8
+             ? common->Int64Constant(ptr)
              : common->Int32Constant(static_cast<int32_t>(ptr));
 }
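The constant here is a raw C++ pointer baked into generated code, so its width must follow the machine word, not the tagged-slot size. A hedged standalone sketch of the same dispatch (Node and MakeConstant are stand-ins, not the compiler's real types):

```cpp
#include <cstdint>

struct Node { int64_t immediate; };  // stand-in for a graph node

Node MakeConstant(int64_t v) { return Node{v}; }

Node PointerConstantSketch(intptr_t ptr) {
  // A machine pointer needs a 64-bit immediate on 64-bit targets; on 32-bit
  // targets the truncation to int32_t below is lossless.
  return sizeof(void*) == 8 ? MakeConstant(static_cast<int64_t>(ptr))
                            : MakeConstant(static_cast<int32_t>(ptr));
}
```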
@@ -3476,7 +3476,7 @@ Node* BytecodeGraphBuilder::MakeNode(const Operator* op, int value_input_count,
   if (has_control) ++input_count_with_deps;
   if (has_effect) ++input_count_with_deps;
   Node** buffer = EnsureInputBufferSize(input_count_with_deps);
-  memcpy(buffer, value_inputs, kPointerSize * value_input_count);
+  memcpy(buffer, value_inputs, kSystemPointerSize * value_input_count);
   Node** current_input = buffer + value_input_count;
   if (has_context) {
     *current_input++ = OperatorProperties::NeedsExactContext(op)
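The buffer being filled holds Node* values, i.e. raw C++ pointers, so the byte count per element is sizeof(Node*) — exactly what kSystemPointerSize names; kTaggedSize would under-copy once it shrinks under pointer compression. A self-contained sketch of the copy:

```cpp
#include <cstring>

struct Node;  // opaque graph node, as in the compiler

// Copies `count` node pointers into `buffer`. sizeof(Node*) is the
// system pointer size, never the tagged-slot size.
void CopyValueInputs(Node** buffer, Node* const* value_inputs, int count) {
  std::memcpy(buffer, value_inputs, sizeof(Node*) * count);
}
```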
@@ -81,7 +81,7 @@ struct IntegralT : UntaggedT {};
 struct WordT : IntegralT {
   static const MachineRepresentation kMachineRepresentation =
-      (kPointerSize == 4) ? MachineRepresentation::kWord32
+      (kSystemPointerSize == 4) ? MachineRepresentation::kWord32
                           : MachineRepresentation::kWord64;
 };
@@ -478,7 +478,7 @@ SlackTrackingPrediction::SlackTrackingPrediction(MapRef initial_map,
                                                  int instance_size)
     : instance_size_(instance_size),
       inobject_property_count_(
-          (instance_size >> kPointerSizeLog2) -
+          (instance_size >> kTaggedSizeLog2) -
           initial_map.GetInObjectPropertiesStartInWords()) {}
 SlackTrackingPrediction
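Instance sizes are byte counts composed of tagged slots, so the in-object property count is a tagged-word computation. A worked sketch with illustrative numbers, assuming a 64-bit heap without pointer compression:

```cpp
constexpr int kTaggedSize = 8;      // assumption: 8-byte tagged slots
constexpr int kTaggedSizeLog2 = 3;

int InobjectPropertyCount(int instance_size, int properties_start_in_words) {
  return (instance_size >> kTaggedSizeLog2) - properties_start_in_words;
}

// Example: a 40-byte instance whose properties start at word 3 (after map,
// properties pointer, elements pointer) has 40/8 - 3 == 2 in-object slots.
```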
@@ -3058,7 +3058,7 @@ Node* EffectControlLinearizer::LowerNewSmiOrObjectElements(Node* node) {
   // Compute the effective size of the backing store.
   Node* size =
-      __ Int32Add(__ Word32Shl(length, __ Int32Constant(kPointerSizeLog2)),
+      __ Int32Add(__ Word32Shl(length, __ Int32Constant(kTaggedSizeLog2)),
                   __ Int32Constant(FixedArray::kHeaderSize));
   // Allocate the result and initialize the header.
@@ -3082,7 +3082,7 @@ Node* EffectControlLinearizer::LowerNewSmiOrObjectElements(Node* node) {
   // Storing "the_hole" doesn't need a write barrier.
   StoreRepresentation rep(MachineRepresentation::kTagged, kNoWriteBarrier);
   Node* offset =
-      __ IntAdd(__ WordShl(index, __ IntPtrConstant(kPointerSizeLog2)),
+      __ IntAdd(__ WordShl(index, __ IntPtrConstant(kTaggedSizeLog2)),
                 __ IntPtrConstant(FixedArray::kHeaderSize - kHeapObjectTag));
   __ Store(rep, result, offset, the_hole);
@@ -4130,7 +4130,7 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
   // The field is located in the {object} itself.
   {
     Node* offset =
-        __ IntAdd(__ WordShl(index, __ IntPtrConstant(kPointerSizeLog2 - 1)),
+        __ IntAdd(__ WordShl(index, __ IntPtrConstant(kTaggedSizeLog2 - 1)),
                   __ IntPtrConstant(JSObject::kHeaderSize - kHeapObjectTag));
     Node* result = __ Load(MachineType::AnyTagged(), object, offset);
     __ Goto(&done, result);
@@ -4144,8 +4144,8 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
         __ LoadField(AccessBuilder::ForJSObjectPropertiesOrHash(), object);
     Node* offset =
         __ IntAdd(__ WordShl(__ IntSub(zero, index),
-                             __ IntPtrConstant(kPointerSizeLog2 - 1)),
-                  __ IntPtrConstant((FixedArray::kHeaderSize - kPointerSize) -
+                             __ IntPtrConstant(kTaggedSizeLog2 - 1)),
+                  __ IntPtrConstant((FixedArray::kHeaderSize - kTaggedSize) -
                                     kHeapObjectTag));
     Node* result = __ Load(MachineType::AnyTagged(), properties, offset);
     __ Goto(&done, result);
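A hedged reading of the `kTaggedSizeLog2 - 1` shift: the encoded field index arrives doubled (its low bit flags mutable-double fields), so shifting the doubled value left by log2(kTaggedSize) - 1 multiplies by kTaggedSize / 2 and yields the byte offset directly. Illustrative arithmetic:

```cpp
constexpr int kTaggedSize = 8;      // assumption: 64-bit heap
constexpr int kTaggedSizeLog2 = 3;

// encoded == 2 * field_word_index for tagged in-object fields (low bit clear).
int ByteOffsetOfEncodedIndex(int encoded) {
  return encoded << (kTaggedSizeLog2 - 1);  // == (encoded / 2) * kTaggedSize
}

// ByteOffsetOfEncodedIndex(4) == 16: field word 2 lives 16 bytes past the
// start of the object.
```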
@@ -4167,7 +4167,7 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
   // The field is located in the {object} itself.
   {
     Node* offset =
-        __ IntAdd(__ WordShl(index, __ IntPtrConstant(kPointerSizeLog2)),
+        __ IntAdd(__ WordShl(index, __ IntPtrConstant(kTaggedSizeLog2)),
                   __ IntPtrConstant(JSObject::kHeaderSize - kHeapObjectTag));
     if (FLAG_unbox_double_fields) {
       Node* result = __ Load(MachineType::Float64(), object, offset);
@@ -4185,8 +4185,8 @@ Node* EffectControlLinearizer::LowerLoadFieldByIndex(Node* node) {
         __ LoadField(AccessBuilder::ForJSObjectPropertiesOrHash(), object);
     Node* offset =
         __ IntAdd(__ WordShl(__ IntSub(zero, index),
-                             __ IntPtrConstant(kPointerSizeLog2)),
-                  __ IntPtrConstant((FixedArray::kHeaderSize - kPointerSize) -
+                             __ IntPtrConstant(kTaggedSizeLog2)),
+                  __ IntPtrConstant((FixedArray::kHeaderSize - kTaggedSize) -
                                     kHeapObjectTag));
     Node* result = __ Load(MachineType::AnyTagged(), properties, offset);
     result = __ LoadField(AccessBuilder::ForHeapNumberValue(), result);
@@ -5227,7 +5227,7 @@ Node* EffectControlLinearizer::LowerFindOrderedHashMapEntryForInt32Key(
   hash = __ WordAnd(hash, __ IntSub(number_of_buckets, __ IntPtrConstant(1)));
   Node* first_entry = ChangeSmiToIntPtr(__ Load(
       MachineType::TaggedSigned(), table,
-      __ IntAdd(__ WordShl(hash, __ IntPtrConstant(kPointerSizeLog2)),
+      __ IntAdd(__ WordShl(hash, __ IntPtrConstant(kTaggedSizeLog2)),
                 __ IntPtrConstant(OrderedHashMap::HashTableStartOffset() -
                                   kHeapObjectTag))));
@@ -5246,7 +5246,7 @@ Node* EffectControlLinearizer::LowerFindOrderedHashMapEntryForInt32Key(
     Node* candidate_key = __ Load(
         MachineType::AnyTagged(), table,
-        __ IntAdd(__ WordShl(entry, __ IntPtrConstant(kPointerSizeLog2)),
+        __ IntAdd(__ WordShl(entry, __ IntPtrConstant(kTaggedSizeLog2)),
                   __ IntPtrConstant(OrderedHashMap::HashTableStartOffset() -
                                     kHeapObjectTag)));
@@ -5275,9 +5275,9 @@ Node* EffectControlLinearizer::LowerFindOrderedHashMapEntryForInt32Key(
       Node* next_entry = ChangeSmiToIntPtr(__ Load(
           MachineType::TaggedSigned(), table,
           __ IntAdd(
-              __ WordShl(entry, __ IntPtrConstant(kPointerSizeLog2)),
+              __ WordShl(entry, __ IntPtrConstant(kTaggedSizeLog2)),
               __ IntPtrConstant(OrderedHashMap::HashTableStartOffset() +
-                                OrderedHashMap::kChainOffset * kPointerSize -
+                                OrderedHashMap::kChainOffset * kTaggedSize -
                                 kHeapObjectTag))));
       __ Goto(&loop, next_entry);
     }
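For orientation, the lowered loads above mirror the OrderedHashMap layout: a power-of-two bucket array of tagged Smi entries starting at HashTableStartOffset(), with per-entry key/value/chain slots each one tagged word wide. A simplified sketch of the probe arithmetic (header size and field counts illustrative, not the exact V8 layout):

```cpp
constexpr int kTaggedSize = 8;       // assumption: 8-byte tagged slots
constexpr int kHashTableStart = 24;  // illustrative header size in bytes

// Byte offset of the bucket selected by `hash` in the tagged backing store.
int BucketOffset(int hash, int number_of_buckets) {
  int bucket = hash & (number_of_buckets - 1);  // bucket count is a power of 2
  return kHashTableStart + bucket * kTaggedSize;
}
```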
@@ -192,7 +192,7 @@ Node* EscapeAnalysisReducer::ReduceDeoptState(Node* node, Node* effect,
     return ObjectIdNode(vobject);
   } else {
     std::vector<Node*> inputs;
-    for (int offset = 0; offset < vobject->size(); offset += kPointerSize) {
+    for (int offset = 0; offset < vobject->size(); offset += kTaggedSize) {
       Node* field =
           analysis_result().GetVirtualObjectField(vobject, offset, effect);
       CHECK_NOT_NULL(field);
@@ -315,10 +315,10 @@ void EscapeAnalysisReducer::Finalize() {
       ElementAccess stack_access;
       stack_access.base_is_tagged = BaseTaggedness::kUntaggedBase;
-      // Reduce base address by {kPointerSize} such that (length - index)
+      // Reduce base address by {kSystemPointerSize} such that (length - index)
       // resolves to the right position.
       stack_access.header_size =
-          CommonFrameConstants::kFixedFrameSizeAboveFp - kPointerSize;
+          CommonFrameConstants::kFixedFrameSizeAboveFp - kSystemPointerSize;
       stack_access.type = Type::NonInternal();
       stack_access.machine_type = MachineType::AnyTagged();
       stack_access.write_barrier_kind = WriteBarrierKind::kNoWriteBarrier;
@@ -503,7 +503,7 @@ int OffsetOfFieldAccess(const Operator* op) {
 int OffsetOfElementAt(ElementAccess const& access, int index) {
   DCHECK_GE(index, 0);
   DCHECK_GE(ElementSizeLog2Of(access.machine_type.representation()),
-            kPointerSizeLog2);
+            kTaggedSizeLog2);
   return access.header_size +
          (index << ElementSizeLog2Of(access.machine_type.representation()));
 }
@@ -846,9 +846,9 @@ const VirtualObject* EscapeAnalysisResult::GetVirtualObject(Node* node) {
 VirtualObject::VirtualObject(VariableTracker* var_states, VirtualObject::Id id,
                              int size)
     : Dependable(var_states->zone()), id_(id), fields_(var_states->zone()) {
-  DCHECK_EQ(0, size % kPointerSize);
+  DCHECK(IsAligned(size, kTaggedSize));
   TRACE("Creating VirtualObject id:%d size:%d\n", id, size);
-  int num_fields = size / kPointerSize;
+  int num_fields = size / kTaggedSize;
   fields_.reserve(num_fields);
   for (int i = 0; i < num_fields; ++i) {
     fields_.push_back(var_states->NewVariable());
@@ -121,7 +121,7 @@ class VirtualObject : public Dependable {
   typedef ZoneVector<Variable>::const_iterator const_iterator;
   VirtualObject(VariableTracker* var_states, Id id, int size);
   Maybe<Variable> FieldAt(int offset) const {
-    CHECK_EQ(0, offset % kPointerSize);
+    CHECK(IsAligned(offset, kTaggedSize));
     CHECK(!HasEscaped());
     if (offset >= size()) {
       // TODO(tebbi): Reading out-of-bounds can only happen in unreachable
@@ -130,10 +130,10 @@ class VirtualObject : public Dependable {
       // once we can handle dead nodes everywhere.
       return Nothing<Variable>();
     }
-    return Just(fields_.at(offset / kPointerSize));
+    return Just(fields_.at(offset / kTaggedSize));
   }
   Id id() const { return id_; }
-  int size() const { return static_cast<int>(kPointerSize * fields_.size()); }
+  int size() const { return static_cast<int>(kTaggedSize * fields_.size()); }
   // Escaped might mean that the object escaped to untracked memory or that it
   // is used in an operation that requires materialization.
   void SetEscaped() { escaped_ = true; }
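Escape analysis models each virtual object as one tracked variable per tagged slot, which is why sizes and offsets here are validated and scaled by kTaggedSize. A standalone sketch of that mapping (simplified; no escape state or zone allocation):

```cpp
#include <cassert>
#include <vector>

constexpr int kTaggedSize = 8;  // assumption: 64-bit, no pointer compression

class VirtualObjectSketch {
 public:
  explicit VirtualObjectSketch(int size_in_bytes)
      : fields_(size_in_bytes / kTaggedSize) {
    assert(size_in_bytes % kTaggedSize == 0);  // sizes are slot-aligned
  }
  // Maps a byte offset to the tracked field for that tagged slot.
  int FieldAt(int offset) const {
    assert(offset % kTaggedSize == 0);
    return fields_.at(offset / kTaggedSize);
  }
  int size() const { return static_cast<int>(fields_.size()) * kTaggedSize; }

 private:
  std::vector<int> fields_;  // one tracked variable per tagged slot
};
```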
@@ -19,7 +19,7 @@ Frame::Frame(int fixed_frame_size_in_slots)
       allocated_double_registers_(nullptr) {}
 int Frame::AlignFrame(int alignment) {
-  int alignment_slots = alignment / kPointerSize;
+  int alignment_slots = alignment / kSystemPointerSize;
   // We have to align return slots separately, because they are claimed
   // separately on the stack.
   int return_delta =
@@ -110,7 +110,7 @@ class Frame : public ZoneObject {
   }
   void AlignSavedCalleeRegisterSlots(int alignment = kDoubleSize) {
-    int alignment_slots = alignment / kPointerSize;
+    int alignment_slots = alignment / kSystemPointerSize;
     int delta = alignment_slots - (frame_slot_count_ & (alignment_slots - 1));
     if (delta != alignment_slots) {
       frame_slot_count_ += delta;
@@ -126,10 +126,10 @@ class Frame : public ZoneObject {
     DCHECK_EQ(frame_slot_count_,
               fixed_slot_count_ + spill_slot_count_ + return_slot_count_);
     int frame_slot_count_before = frame_slot_count_;
-    if (alignment > kPointerSize) {
+    if (alignment > kSystemPointerSize) {
       // Slots are pointer sized, so alignment greater than a pointer size
       // requires allocating additional slots.
-      width += alignment - kPointerSize;
+      width += alignment - kSystemPointerSize;
     }
     AllocateAlignedFrameSlots(width);
     spill_slot_count_ += frame_slot_count_ - frame_slot_count_before;
@@ -156,12 +156,13 @@ class Frame : public ZoneObject {
  private:
   void AllocateAlignedFrameSlots(int width) {
     DCHECK_LT(0, width);
-    int new_frame_slots = (width + kPointerSize - 1) / kPointerSize;
+    int new_frame_slots = (width + kSystemPointerSize - 1) / kSystemPointerSize;
     // Align to 8 bytes if width is a multiple of 8 bytes, and to 16 bytes if
     // multiple of 16.
-    int align_to = (width & 15) == 0 ? 16 : (width & 7) == 0 ? 8 : kPointerSize;
-    frame_slot_count_ =
-        RoundUp(frame_slot_count_ + new_frame_slots, align_to / kPointerSize);
+    int align_to =
+        (width & 15) == 0 ? 16 : (width & 7) == 0 ? 8 : kSystemPointerSize;
+    frame_slot_count_ = RoundUp(frame_slot_count_ + new_frame_slots,
+                                align_to / kSystemPointerSize);
     DCHECK_LT(0, frame_slot_count_);
   }
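Frame slots, by contrast, stay machine-word sized: spill slots hold raw register contents, so all of the arithmetic here divides by kSystemPointerSize. A worked sketch of the slot math under stated assumptions:

```cpp
constexpr int kSystemPointerSize = 8;  // assumption: 64-bit target

int RoundUpTo(int value, int multiple) {
  return (value + multiple - 1) / multiple * multiple;
}

// Returns the new frame slot count after allocating `width` bytes.
int AllocateAlignedFrameSlots(int frame_slot_count, int width) {
  int new_slots = (width + kSystemPointerSize - 1) / kSystemPointerSize;
  // 16-byte-wide values get 16-byte alignment, 8-byte-wide get 8, else a word.
  int align_to =
      (width & 15) == 0 ? 16 : (width & 7) == 0 ? 8 : kSystemPointerSize;
  return RoundUpTo(frame_slot_count + new_slots,
                   align_to / kSystemPointerSize);
}

// Example: AllocateAlignedFrameSlots(3, 16) == 6: two new slots, then the
// total of 5 is rounded up to the next 2-slot (16-byte) boundary.
```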
@@ -236,7 +237,9 @@ class FrameAccessState : public ZoneObject {
         StandardFrameConstants::kFixedSlotCountAboveFp;
     return frame_slot_count + sp_delta();
   }
-  int GetSPToFPOffset() const { return GetSPToFPSlotCount() * kPointerSize; }
+  int GetSPToFPOffset() const {
+    return GetSPToFPSlotCount() * kSystemPointerSize;
+  }
   // Get the frame offset for a given spill slot. The location depends on the
   // calling convention and the specific frame layout, and may thus be
@@ -206,7 +206,7 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) {
     // Actually allocate and initialize the arguments object.
     AllocationBuilder a(jsgraph(), effect, control);
     Node* properties = jsgraph()->EmptyFixedArrayConstant();
-    STATIC_ASSERT(JSSloppyArgumentsObject::kSize == 5 * kPointerSize);
+    STATIC_ASSERT(JSSloppyArgumentsObject::kSize == 5 * kTaggedSize);
     a.Allocate(JSSloppyArgumentsObject::kSize);
     a.Store(AccessBuilder::ForMap(), arguments_map);
     a.Store(AccessBuilder::ForJSObjectPropertiesOrHash(), properties);
@@ -235,7 +235,7 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) {
     // Actually allocate and initialize the arguments object.
     AllocationBuilder a(jsgraph(), effect, control);
     Node* properties = jsgraph()->EmptyFixedArrayConstant();
-    STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
+    STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kTaggedSize);
     a.Allocate(JSStrictArgumentsObject::kSize);
     a.Store(AccessBuilder::ForMap(), arguments_map);
     a.Store(AccessBuilder::ForJSObjectPropertiesOrHash(), properties);
@@ -265,7 +265,7 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) {
     // Actually allocate and initialize the jsarray.
     AllocationBuilder a(jsgraph(), effect, control);
     Node* properties = jsgraph()->EmptyFixedArrayConstant();
-    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
+    STATIC_ASSERT(JSArray::kSize == 4 * kTaggedSize);
     a.Allocate(JSArray::kSize);
     a.Store(AccessBuilder::ForMap(), jsarray_map);
     a.Store(AccessBuilder::ForJSObjectPropertiesOrHash(), properties);
@@ -311,7 +311,7 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) {
     AllocationBuilder a(jsgraph(), effect, control);
     Node* properties = jsgraph()->EmptyFixedArrayConstant();
     int length = args_state_info.parameter_count() - 1;  // Minus receiver.
-    STATIC_ASSERT(JSSloppyArgumentsObject::kSize == 5 * kPointerSize);
+    STATIC_ASSERT(JSSloppyArgumentsObject::kSize == 5 * kTaggedSize);
     a.Allocate(JSSloppyArgumentsObject::kSize);
     a.Store(AccessBuilder::ForMap(), arguments_map);
     a.Store(AccessBuilder::ForJSObjectPropertiesOrHash(), properties);
@@ -347,7 +347,7 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) {
     AllocationBuilder a(jsgraph(), effect, control);
     Node* properties = jsgraph()->EmptyFixedArrayConstant();
     int length = args_state_info.parameter_count() - 1;  // Minus receiver.
-    STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
+    STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kTaggedSize);
     a.Allocate(JSStrictArgumentsObject::kSize);
     a.Store(AccessBuilder::ForMap(), arguments_map);
     a.Store(AccessBuilder::ForJSObjectPropertiesOrHash(), properties);
@@ -387,7 +387,7 @@ Reduction JSCreateLowering::ReduceJSCreateArguments(Node* node) {
     // -1 to minus receiver
     int argument_count = args_state_info.parameter_count() - 1;
     int length = std::max(0, argument_count - start_index);
-    STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
+    STATIC_ASSERT(JSArray::kSize == 4 * kTaggedSize);
     a.Allocate(JSArray::kSize);
     a.Store(AccessBuilder::ForMap(), jsarray_map);
     a.Store(AccessBuilder::ForJSObjectPropertiesOrHash(), properties);
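These STATIC_ASSERTs pin object layouts in tagged words rather than machine words: JSArray, for instance, is map + properties + elements + length, i.e. four tagged slots. A compile-time sketch of the invariant (slot size assumed, not V8's generated layout constants):

```cpp
constexpr int kTaggedSize = 8;  // assumption: 8-byte tagged slots

// map, properties/hash, elements, length
constexpr int kJSArrayFieldCount = 4;
constexpr int kJSArraySize = kJSArrayFieldCount * kTaggedSize;

static_assert(kJSArraySize == 32,
              "four tagged slots on an uncompressed 64-bit heap");
```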
@@ -970,7 +970,7 @@ Reduction JSCreateLowering::ReduceJSCreateClosure(Node* node) {
   PretenureFlag pretenure = NOT_TENURED;
   // Emit code to allocate the JSFunction instance.
-  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kPointerSize);
+  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
   AllocationBuilder a(jsgraph(), effect, control);
   a.Allocate(function_map.instance_size(), pretenure, Type::Function());
   a.Store(AccessBuilder::ForMap(), function_map);
@@ -982,11 +982,11 @@ Reduction JSCreateLowering::ReduceJSCreateClosure(Node* node) {
   a.Store(AccessBuilder::ForJSFunctionContext(), context);
   a.Store(AccessBuilder::ForJSFunctionFeedbackCell(), feedback_cell);
   a.Store(AccessBuilder::ForJSFunctionCode(), code);
-  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kPointerSize);
+  STATIC_ASSERT(JSFunction::kSizeWithoutPrototype == 7 * kTaggedSize);
   if (function_map.has_prototype_slot()) {
     a.Store(AccessBuilder::ForJSFunctionPrototypeOrInitialMap(),
             jsgraph()->TheHoleConstant());
-    STATIC_ASSERT(JSFunction::kSizeWithPrototype == 8 * kPointerSize);
+    STATIC_ASSERT(JSFunction::kSizeWithPrototype == 8 * kTaggedSize);
   }
   for (int i = 0; i < function_map.GetInObjectProperties(); i++) {
     a.Store(AccessBuilder::ForJSObjectInObjectProperty(function_map, i),
@@ -1016,7 +1016,7 @@ Reduction JSCreateLowering::ReduceJSCreateIterResultObject(Node* node) {
           jsgraph()->EmptyFixedArrayConstant());
   a.Store(AccessBuilder::ForJSIteratorResultValue(), value);
   a.Store(AccessBuilder::ForJSIteratorResultDone(), done);
-  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
+  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kTaggedSize);
   a.FinishAndChange(node);
   return Changed(node);
 }
@@ -1038,7 +1038,7 @@ Reduction JSCreateLowering::ReduceJSCreateStringIterator(Node* node) {
           jsgraph()->EmptyFixedArrayConstant());
   a.Store(AccessBuilder::ForJSStringIteratorString(), string);
   a.Store(AccessBuilder::ForJSStringIteratorIndex(), jsgraph()->SmiConstant(0));
-  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kPointerSize);
+  STATIC_ASSERT(JSIteratorResult::kSize == 5 * kTaggedSize);
   a.FinishAndChange(node);
   return Changed(node);
 }
@@ -1068,7 +1068,7 @@ Reduction JSCreateLowering::ReduceJSCreateKeyValueArray(Node* node) {
   a.Store(AccessBuilder::ForJSObjectPropertiesOrHash(), properties);
   a.Store(AccessBuilder::ForJSObjectElements(), elements);
   a.Store(AccessBuilder::ForJSArrayLength(PACKED_ELEMENTS), length);
-  STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
+  STATIC_ASSERT(JSArray::kSize == 4 * kTaggedSize);
   a.FinishAndChange(node);
   return Changed(node);
 }
@@ -1091,7 +1091,7 @@ Reduction JSCreateLowering::ReduceJSCreatePromise(Node* node) {
   STATIC_ASSERT(v8::Promise::kPending == 0);
   a.Store(AccessBuilder::ForJSObjectOffset(JSPromise::kFlagsOffset),
           jsgraph()->ZeroConstant());
-  STATIC_ASSERT(JSPromise::kSize == 5 * kPointerSize);
+  STATIC_ASSERT(JSPromise::kSize == 5 * kTaggedSize);
   for (int offset = JSPromise::kSize;
        offset < JSPromise::kSizeWithEmbedderFields; offset += kTaggedSize) {
     a.Store(AccessBuilder::ForJSObjectOffset(offset),
@@ -1412,7 +1412,7 @@ Reduction JSCreateLowering::ReduceJSCreateObject(Node* node) {
   // Initialize Object fields.
   Node* undefined = jsgraph()->UndefinedConstant();
   for (int offset = JSObject::kHeaderSize; offset < instance_size;
-       offset += kPointerSize) {
+       offset += kTaggedSize) {
     a.Store(AccessBuilder::ForJSObjectOffset(offset, kNoWriteBarrier),
             undefined);
   }
@@ -1787,17 +1787,16 @@ Node* JSCreateLowering::AllocateLiteralRegExp(Node* effect, Node* control,
   // Sanity check that JSRegExp object layout hasn't changed.
   STATIC_ASSERT(static_cast<int>(JSRegExp::kDataOffset) ==
                 static_cast<int>(JSObject::kHeaderSize));
-  STATIC_ASSERT(JSRegExp::kSourceOffset ==
-                JSRegExp::kDataOffset + kPointerSize);
+  STATIC_ASSERT(JSRegExp::kSourceOffset == JSRegExp::kDataOffset + kTaggedSize);
   STATIC_ASSERT(JSRegExp::kFlagsOffset ==
-                JSRegExp::kSourceOffset + kPointerSize);
-  STATIC_ASSERT(JSRegExp::kSize == JSRegExp::kFlagsOffset + kPointerSize);
+                JSRegExp::kSourceOffset + kTaggedSize);
+  STATIC_ASSERT(JSRegExp::kSize == JSRegExp::kFlagsOffset + kTaggedSize);
   STATIC_ASSERT(JSRegExp::kLastIndexOffset == JSRegExp::kSize);
   STATIC_ASSERT(JSRegExp::kInObjectFieldCount == 1);  // LastIndex.
   const PretenureFlag pretenure = NOT_TENURED;
   const int size =
-      JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kPointerSize;
+      JSRegExp::kSize + JSRegExp::kInObjectFieldCount * kTaggedSize;
   AllocationBuilder builder(jsgraph(), effect, control);
   builder.Allocate(size, pretenure, Type::For(boilerplate_map));
@@ -1953,7 +1953,7 @@ int MapRef::GetInObjectPropertyOffset(int i) const {
     AllowHandleDereference allow_handle_dereference;
     return object()->GetInObjectPropertyOffset(i);
   }
-  return (GetInObjectPropertiesStartInWords() + i) * kPointerSize;
+  return (GetInObjectPropertiesStartInWords() + i) * kTaggedSize;
 }
 PropertyDetails MapRef::GetPropertyDetails(int descriptor_index) const {
@@ -67,14 +67,14 @@ class LinkageLocation {
   static LinkageLocation ForSavedCallerReturnAddress() {
     return ForCalleeFrameSlot((StandardFrameConstants::kCallerPCOffset -
                                StandardFrameConstants::kCallerPCOffset) /
-                                  kPointerSize,
+                                  kSystemPointerSize,
                               MachineType::Pointer());
   }
   static LinkageLocation ForSavedCallerFramePtr() {
     return ForCalleeFrameSlot((StandardFrameConstants::kCallerPCOffset -
                                StandardFrameConstants::kCallerFPOffset) /
-                                  kPointerSize,
+                                  kSystemPointerSize,
                               MachineType::Pointer());
   }
@@ -82,14 +82,14 @@ class LinkageLocation {
     DCHECK(V8_EMBEDDED_CONSTANT_POOL);
     return ForCalleeFrameSlot((StandardFrameConstants::kCallerPCOffset -
                                StandardFrameConstants::kConstantPoolOffset) /
-                                  kPointerSize,
+                                  kSystemPointerSize,
                               MachineType::AnyTagged());
   }
   static LinkageLocation ForSavedCallerFunction() {
     return ForCalleeFrameSlot((StandardFrameConstants::kCallerPCOffset -
                                StandardFrameConstants::kFunctionOffset) /
-                                  kPointerSize,
+                                  kSystemPointerSize,
                               MachineType::AnyTagged());
   }
@@ -111,7 +111,7 @@ class LinkageLocation {
   int GetSizeInPointers() const {
     // Round up
-    return (GetSize() + kPointerSize - 1) / kPointerSize;
+    return (GetSize() + kSystemPointerSize - 1) / kSystemPointerSize;
   }
   int32_t GetLocation() const {
@@ -1209,8 +1209,8 @@ LoadElimination::AbstractState const* LoadElimination::ComputeLoopState(
 // static
 int LoadElimination::FieldIndexOf(int offset) {
-  DCHECK_EQ(0, offset % kPointerSize);
-  int field_index = offset / kPointerSize;
+  DCHECK(IsAligned(offset, kTaggedSize));
+  int field_index = offset / kTaggedSize;
   if (field_index >= static_cast<int>(kMaxTrackedFields)) return -1;
   DCHECK_LT(0, field_index);
   return field_index - 1;
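Load elimination tracks fields at tagged-slot granularity: a byte offset maps to offset / kTaggedSize, and the `- 1` skips the map word at offset 0, which is tracked separately. A hedged standalone sketch (the kMaxTrackedFields value is assumed, not taken from the source):

```cpp
#include <cassert>

constexpr int kTaggedSize = 8;         // assumption: 8-byte tagged slots
constexpr int kMaxTrackedFields = 32;  // assumption: illustrative limit

// Returns the tracked-field index for a byte offset, or -1 if untracked.
int FieldIndexOf(int offset) {
  assert(offset % kTaggedSize == 0);
  int field_index = offset / kTaggedSize;
  if (field_index >= kMaxTrackedFields) return -1;
  assert(field_index > 0);  // offset 0 is the map word, handled elsewhere
  return field_index - 1;
}
```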
@@ -1226,9 +1226,13 @@ int LoadElimination::FieldIndexOf(FieldAccess const& access) {
       UNREACHABLE();
       break;
     case MachineRepresentation::kWord32:
+      if (kInt32Size != kTaggedSize) {
+        return -1;  // We currently only track tagged pointer size fields.
+      }
+      break;
     case MachineRepresentation::kWord64:
-      if (rep != MachineType::PointerRepresentation()) {
-        return -1;  // We currently only track pointer size fields.
+      if (kInt64Size != kTaggedSize) {
+        return -1;  // We currently only track tagged pointer size fields.
       }
       break;
     case MachineRepresentation::kWord8:
@@ -1236,8 +1240,8 @@ int LoadElimination::FieldIndexOf(FieldAccess const& access) {
     case MachineRepresentation::kFloat32:
       return -1;  // Currently untracked.
     case MachineRepresentation::kFloat64:
-      if (kDoubleSize != kPointerSize) {
-        return -1;  // We currently only track pointer size fields.
+      if (kDoubleSize != kTaggedSize) {
+        return -1;  // We currently only track tagged pointer size fields.
       }
       break;
     case MachineRepresentation::kTaggedSigned:
@@ -54,7 +54,7 @@ Node* MachineGraph::RelocatableInt64Constant(int64_t value,
 Node* MachineGraph::RelocatableIntPtrConstant(intptr_t value,
                                               RelocInfo::Mode rmode) {
-  return kPointerSize == 8
+  return kSystemPointerSize == 8
              ? RelocatableInt64Constant(value, rmode)
              : RelocatableInt32Constant(static_cast<int>(value), rmode);
 }
@@ -52,7 +52,7 @@ Node* RawMachineAssembler::UndefinedConstant() {
 Node* RawMachineAssembler::RelocatableIntPtrConstant(intptr_t value,
                                                      RelocInfo::Mode rmode) {
-  return kPointerSize == 8
+  return kSystemPointerSize == 8
              ? RelocatableInt64Constant(value, rmode)
              : RelocatableInt32Constant(static_cast<int>(value), rmode);
 }
@@ -80,7 +80,7 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
   }
   Node* IntPtrConstant(intptr_t value) {
     // TODO(dcarney): mark generated code as unserializable if value != 0.
-    return kPointerSize == 8 ? Int64Constant(value)
+    return kSystemPointerSize == 8 ? Int64Constant(value)
                              : Int32Constant(static_cast<int>(value));
   }
   Node* RelocatableIntPtrConstant(intptr_t value, RelocInfo::Mode rmode);
@@ -476,7 +476,7 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
 #define INTPTR_BINOP(prefix, name)                        \
   Node* IntPtr##name(Node* a, Node* b) {                  \
-    return kPointerSize == 8 ? prefix##64##name(a, b)     \
+    return kSystemPointerSize == 8 ? prefix##64##name(a, b) \
                              : prefix##32##name(a, b);    \
   }
@@ -497,7 +497,7 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
 #define UINTPTR_BINOP(prefix, name)                       \
   Node* UintPtr##name(Node* a, Node* b) {                 \
-    return kPointerSize == 8 ? prefix##64##name(a, b)     \
+    return kSystemPointerSize == 8 ? prefix##64##name(a, b) \
                              : prefix##32##name(a, b);    \
   }
@@ -517,7 +517,7 @@ class V8_EXPORT_PRIVATE RawMachineAssembler {
   }
   Node* IntPtrAbsWithOverflow(Node* a) {
-    return kPointerSize == 8 ? Int64AbsWithOverflow(a)
+    return kSystemPointerSize == 8 ? Int64AbsWithOverflow(a)
                              : Int32AbsWithOverflow(a);
   }
@@ -2708,7 +2708,7 @@ Node* WasmGraphBuilder::BuildImportCall(wasm::FunctionSig* sig, Node** args,
       LOAD_INSTANCE_FIELD(ImportedFunctionTargets, MachineType::Pointer());
   Node* target_node = SetEffect(graph()->NewNode(
       mcgraph()->machine()->Load(MachineType::Pointer()), imported_targets,
-      mcgraph()->Int32Constant(func_index * kPointerSize), Effect(),
+      mcgraph()->Int32Constant(func_index * kSystemPointerSize), Effect(),
       Control()));
   args[0] = target_node;
   return BuildWasmCall(sig, args, rets, position, ref_node,
@@ -2722,21 +2722,23 @@ Node* WasmGraphBuilder::BuildImportCall(wasm::FunctionSig* sig, Node** args,
   // Load the imported function refs array from the instance.
   Node* imported_function_refs =
       LOAD_INSTANCE_FIELD(ImportedFunctionRefs, MachineType::TaggedPointer());
-  // Access fixed array at {header_size - tag + func_index * kPointerSize}.
+  // Access fixed array at {header_size - tag + func_index * kTaggedSize}.
   Node* imported_instances_data = graph()->NewNode(
       mcgraph()->machine()->IntAdd(), imported_function_refs,
       mcgraph()->IntPtrConstant(
           wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0)));
-  Node* func_index_times_pointersize = graph()->NewNode(
+  Node* func_index_times_tagged_size = graph()->NewNode(
       mcgraph()->machine()->IntMul(), Uint32ToUintptr(func_index),
-      mcgraph()->Int32Constant(kPointerSize));
+      mcgraph()->Int32Constant(kTaggedSize));
   Node* ref_node = SetEffect(
       graph()->NewNode(mcgraph()->machine()->Load(MachineType::TaggedPointer()),
-                       imported_instances_data, func_index_times_pointersize,
+                       imported_instances_data, func_index_times_tagged_size,
                        Effect(), Control()));
   // Load the target from the imported_targets array at the offset of
   // {func_index}.
+  STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
+  Node* func_index_times_pointersize = func_index_times_tagged_size;
   Node* imported_targets =
       LOAD_INSTANCE_FIELD(ImportedFunctionTargets, MachineType::Pointer());
   Node* target_node = SetEffect(graph()->NewNode(
@@ -2802,12 +2804,12 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
       LOAD_INSTANCE_FIELD(IndirectFunctionTableSigIds, MachineType::Pointer());
   int32_t expected_sig_id = env_->module->signature_ids[sig_index];
-  Node* scaled_key = Uint32ToUintptr(
+  Node* int32_scaled_key = Uint32ToUintptr(
       graph()->NewNode(machine->Word32Shl(), key, Int32Constant(2)));
-  Node* loaded_sig =
-      SetEffect(graph()->NewNode(machine->Load(MachineType::Int32()),
-                                 ift_sig_ids, scaled_key, Effect(), Control()));
+  Node* loaded_sig = SetEffect(
+      graph()->NewNode(machine->Load(MachineType::Int32()), ift_sig_ids,
+                       int32_scaled_key, Effect(), Control()));
   Node* sig_match = graph()->NewNode(machine->WordEqual(), loaded_sig,
                                      Int32Constant(expected_sig_id));
@@ -2818,16 +2820,19 @@ Node* WasmGraphBuilder::CallIndirect(uint32_t sig_index, Node** args,
   Node* ift_instances = LOAD_INSTANCE_FIELD(IndirectFunctionTableRefs,
                                             MachineType::TaggedPointer());
-  scaled_key = graph()->NewNode(machine->Word32Shl(), key,
-                                Int32Constant(kPointerSizeLog2));
+  Node* intptr_scaled_key = graph()->NewNode(
+      machine->Word32Shl(), key, Int32Constant(kSystemPointerSizeLog2));
-  Node* target =
-      SetEffect(graph()->NewNode(machine->Load(MachineType::Pointer()),
-                                 ift_targets, scaled_key, Effect(), Control()));
+  Node* target = SetEffect(
+      graph()->NewNode(machine->Load(MachineType::Pointer()), ift_targets,
+                       intptr_scaled_key, Effect(), Control()));
+  STATIC_ASSERT(kTaggedSize == kSystemPointerSize);
+  Node* tagged_scaled_key = intptr_scaled_key;
   Node* target_instance = SetEffect(graph()->NewNode(
       machine->Load(MachineType::TaggedPointer()),
-      graph()->NewNode(machine->IntAdd(), ift_instances, scaled_key),
+      graph()->NewNode(machine->IntAdd(), ift_instances, tagged_scaled_key),
       Int32Constant(wasm::ObjectAccess::ElementOffsetInTaggedFixedArray(0)),
       Effect(), Control()));
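The renamed keys make the three scale factors explicit: signature ids are 32-bit ints (key * 4), call targets are machine pointers (key * kSystemPointerSize), and instance refs live in a tagged FixedArray (key * kTaggedSize). The STATIC_ASSERT documents that the last two coincide until pointer compression lands, letting one shifted key serve both tables. A sketch of the three offset computations:

```cpp
#include <cstdint>

constexpr int kSystemPointerSize = 8;            // assumption: 64-bit target
constexpr int kTaggedSize = kSystemPointerSize;  // asserted in the patch

int32_t SigIdOffset(int32_t key) { return key * 4; }  // int32 entries
int32_t TargetOffset(int32_t key) { return key * kSystemPointerSize; }
int32_t RefOffset(int32_t key) { return key * kTaggedSize; }

static_assert(kTaggedSize == kSystemPointerSize,
              "one scaled key can address both tables for now");
```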
@@ -293,7 +293,7 @@ V8_EXPORT_PRIVATE inline int ElementSizeLog2Of(MachineRepresentation rep) {
     case MachineRepresentation::kTaggedSigned:
     case MachineRepresentation::kTaggedPointer:
     case MachineRepresentation::kTagged:
-      return kPointerSizeLog2;
+      return kTaggedSizeLog2;
     default:
       break;
   }
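With this change, element offset computations ask the representation for its own log2 size, and tagged representations answer kTaggedSizeLog2 instead of the machine-word log2. A reduced sketch of the dispatch:

```cpp
constexpr int kTaggedSizeLog2 = 3;  // assumption: 8-byte tagged slots

enum class Rep { kWord32, kWord64, kTaggedSigned, kTaggedPointer, kTagged };

int ElementSizeLog2Of(Rep rep) {
  switch (rep) {
    case Rep::kWord32:
      return 2;
    case Rep::kWord64:
      return 3;
    case Rep::kTaggedSigned:
    case Rep::kTaggedPointer:
    case Rep::kTagged:
      return kTaggedSizeLog2;  // tagged slots, not machine words
  }
  return -1;  // unreachable: all cases handled above
}
```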