Commit 369214c1 authored by erik.corry@gmail.com's avatar erik.corry@gmail.com

Use a root array register for constant loads and stack guards on x64.

This only wins us around 1% in performance, but it makes the code more
compact.  We don't currently have a way to represent in the virtual
frame that a slot contains a value from the root array.  Adding this
would probably make the code even more compact.
Review URL: http://codereview.chromium.org/174639

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@2783 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 5ce6842d
...@@ -3253,15 +3253,13 @@ bool Heap::Setup(bool create_heap_objects) { ...@@ -3253,15 +3253,13 @@ bool Heap::Setup(bool create_heap_objects) {
void Heap::SetStackLimit(intptr_t limit) { void Heap::SetStackLimit(intptr_t limit) {
// We don't use the stack limit in the roots array on x86-64 yet, but since // On 64 bit machines, pointers are generally out of range of Smis. We write
// pointers are generally out of range of Smis we should set the value either. // something that looks like an out of range Smi to the GC.
#if !V8_HOST_ARCH_64_BIT
// Set up the special root array entry containing the stack guard. // Set up the special root array entry containing the stack guard.
// This is actually an address, but the tag makes the GC ignore it. // This is actually an address, but the tag makes the GC ignore it.
set_stack_limit(Smi::FromInt(limit >> kSmiTagSize)); roots_[kStackLimitRootIndex] =
#else reinterpret_cast<Object*>((limit & ~kSmiTagMask) | kSmiTag);
set_stack_limit(Smi::FromInt(0));
#endif
} }
......
...@@ -35,8 +35,21 @@ namespace internal { ...@@ -35,8 +35,21 @@ namespace internal {
// Defines all the roots in Heap. // Defines all the roots in Heap.
#define STRONG_ROOT_LIST(V) \ #define STRONG_ROOT_LIST(V) \
V(Map, meta_map, MetaMap) \ /* Cluster the most popular ones in a few cache lines here at the top. */ \
V(Smi, stack_limit, StackLimit) \
V(Object, undefined_value, UndefinedValue) \
V(Object, the_hole_value, TheHoleValue) \
V(Object, null_value, NullValue) \
V(Object, true_value, TrueValue) \
V(Object, false_value, FalseValue) \
V(Map, heap_number_map, HeapNumberMap) \ V(Map, heap_number_map, HeapNumberMap) \
V(Map, global_context_map, GlobalContextMap) \
V(Map, fixed_array_map, FixedArrayMap) \
V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
V(Map, meta_map, MetaMap) \
V(Object, termination_exception, TerminationException) \
V(Map, hash_table_map, HashTableMap) \
V(FixedArray, empty_fixed_array, EmptyFixedArray) \
V(Map, short_string_map, ShortStringMap) \ V(Map, short_string_map, ShortStringMap) \
V(Map, medium_string_map, MediumStringMap) \ V(Map, medium_string_map, MediumStringMap) \
V(Map, long_string_map, LongStringMap) \ V(Map, long_string_map, LongStringMap) \
...@@ -95,11 +108,8 @@ namespace internal { ...@@ -95,11 +108,8 @@ namespace internal {
V(Map, undetectable_long_ascii_string_map, UndetectableLongAsciiStringMap) \ V(Map, undetectable_long_ascii_string_map, UndetectableLongAsciiStringMap) \
V(Map, byte_array_map, ByteArrayMap) \ V(Map, byte_array_map, ByteArrayMap) \
V(Map, pixel_array_map, PixelArrayMap) \ V(Map, pixel_array_map, PixelArrayMap) \
V(Map, fixed_array_map, FixedArrayMap) \
V(Map, hash_table_map, HashTableMap) \
V(Map, context_map, ContextMap) \ V(Map, context_map, ContextMap) \
V(Map, catch_context_map, CatchContextMap) \ V(Map, catch_context_map, CatchContextMap) \
V(Map, global_context_map, GlobalContextMap) \
V(Map, code_map, CodeMap) \ V(Map, code_map, CodeMap) \
V(Map, oddball_map, OddballMap) \ V(Map, oddball_map, OddballMap) \
V(Map, global_property_cell_map, GlobalPropertyCellMap) \ V(Map, global_property_cell_map, GlobalPropertyCellMap) \
...@@ -109,17 +119,9 @@ namespace internal { ...@@ -109,17 +119,9 @@ namespace internal {
V(Map, one_pointer_filler_map, OnePointerFillerMap) \ V(Map, one_pointer_filler_map, OnePointerFillerMap) \
V(Map, two_pointer_filler_map, TwoPointerFillerMap) \ V(Map, two_pointer_filler_map, TwoPointerFillerMap) \
V(Object, nan_value, NanValue) \ V(Object, nan_value, NanValue) \
V(Object, undefined_value, UndefinedValue) \
V(Object, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
V(Object, termination_exception, TerminationException) \
V(Object, minus_zero_value, MinusZeroValue) \ V(Object, minus_zero_value, MinusZeroValue) \
V(Object, null_value, NullValue) \
V(Object, true_value, TrueValue) \
V(Object, false_value, FalseValue) \
V(String, empty_string, EmptyString) \ V(String, empty_string, EmptyString) \
V(FixedArray, empty_fixed_array, EmptyFixedArray) \
V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \ V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
V(Object, the_hole_value, TheHoleValue) \
V(Map, neander_map, NeanderMap) \ V(Map, neander_map, NeanderMap) \
V(JSObject, message_listeners, MessageListeners) \ V(JSObject, message_listeners, MessageListeners) \
V(Proxy, prototype_accessors, PrototypeAccessors) \ V(Proxy, prototype_accessors, PrototypeAccessors) \
...@@ -133,7 +135,6 @@ namespace internal { ...@@ -133,7 +135,6 @@ namespace internal {
V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \ V(FixedArray, single_character_string_cache, SingleCharacterStringCache) \
V(FixedArray, natives_source_cache, NativesSourceCache) \ V(FixedArray, natives_source_cache, NativesSourceCache) \
V(Object, last_script_id, LastScriptId) \ V(Object, last_script_id, LastScriptId) \
V(Smi, stack_limit, StackLimit)
#define ROOT_LIST(V) \ #define ROOT_LIST(V) \
......
...@@ -139,9 +139,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) { ...@@ -139,9 +139,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// Fill remaining expected arguments with undefined values. // Fill remaining expected arguments with undefined values.
Label fill; Label fill;
__ movq(kScratchRegister, __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
Factory::undefined_value(),
RelocInfo::EMBEDDED_OBJECT);
__ bind(&fill); __ bind(&fill);
__ incq(rcx); __ incq(rcx);
__ push(kScratchRegister); __ push(kScratchRegister);
...@@ -218,9 +216,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) { ...@@ -218,9 +216,9 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ testl(rbx, Immediate(kSmiTagMask)); __ testl(rbx, Immediate(kSmiTagMask));
__ j(zero, &call_to_object); __ j(zero, &call_to_object);
__ Cmp(rbx, Factory::null_value()); __ CompareRoot(rbx, Heap::kNullValueRootIndex);
__ j(equal, &use_global_receiver); __ j(equal, &use_global_receiver);
__ Cmp(rbx, Factory::undefined_value()); __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
__ j(equal, &use_global_receiver); __ j(equal, &use_global_receiver);
__ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx); __ CmpObjectType(rbx, FIRST_JS_OBJECT_TYPE, rcx);
...@@ -386,9 +384,9 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) { ...@@ -386,9 +384,9 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
__ movq(rbx, Operand(rbp, kReceiverOffset)); __ movq(rbx, Operand(rbp, kReceiverOffset));
__ testl(rbx, Immediate(kSmiTagMask)); __ testl(rbx, Immediate(kSmiTagMask));
__ j(zero, &call_to_object); __ j(zero, &call_to_object);
__ Cmp(rbx, Factory::null_value()); __ CompareRoot(rbx, Heap::kNullValueRootIndex);
__ j(equal, &use_global_receiver); __ j(equal, &use_global_receiver);
__ Cmp(rbx, Factory::undefined_value()); __ CompareRoot(rbx, Heap::kUndefinedValueRootIndex);
__ j(equal, &use_global_receiver); __ j(equal, &use_global_receiver);
// If given receiver is already a JavaScript object then there's no // If given receiver is already a JavaScript object then there's no
...@@ -548,7 +546,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { ...@@ -548,7 +546,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// rbx: JSObject (not HeapObject tagged - the actual address). // rbx: JSObject (not HeapObject tagged - the actual address).
// rdi: start of next object // rdi: start of next object
__ movq(Operand(rbx, JSObject::kMapOffset), rax); __ movq(Operand(rbx, JSObject::kMapOffset), rax);
__ Move(rcx, Factory::empty_fixed_array()); __ LoadRoot(rcx, Heap::kEmptyFixedArrayRootIndex);
__ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx); __ movq(Operand(rbx, JSObject::kPropertiesOffset), rcx);
__ movq(Operand(rbx, JSObject::kElementsOffset), rcx); __ movq(Operand(rbx, JSObject::kElementsOffset), rcx);
// Set extra fields in the newly allocated object. // Set extra fields in the newly allocated object.
...@@ -556,7 +554,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { ...@@ -556,7 +554,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// rbx: JSObject // rbx: JSObject
// rdi: start of next object // rdi: start of next object
{ Label loop, entry; { Label loop, entry;
__ Move(rdx, Factory::undefined_value()); __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
__ lea(rcx, Operand(rbx, JSObject::kHeaderSize)); __ lea(rcx, Operand(rbx, JSObject::kHeaderSize));
__ jmp(&entry); __ jmp(&entry);
__ bind(&loop); __ bind(&loop);
...@@ -613,7 +611,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { ...@@ -613,7 +611,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// rdi: FixedArray // rdi: FixedArray
// rdx: number of elements // rdx: number of elements
// rax: start of next object // rax: start of next object
__ Move(rcx, Factory::fixed_array_map()); __ LoadRoot(rcx, Heap::kFixedArrayMapRootIndex);
__ movq(Operand(rdi, JSObject::kMapOffset), rcx); // setup the map __ movq(Operand(rdi, JSObject::kMapOffset), rcx); // setup the map
__ movl(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length __ movl(Operand(rdi, FixedArray::kLengthOffset), rdx); // and length
...@@ -623,7 +621,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) { ...@@ -623,7 +621,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
// rax: start of next object // rax: start of next object
// rdx: number of elements // rdx: number of elements
{ Label loop, entry; { Label loop, entry;
__ Move(rdx, Factory::undefined_value()); __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
__ lea(rcx, Operand(rdi, FixedArray::kHeaderSize)); __ lea(rcx, Operand(rdi, FixedArray::kHeaderSize));
__ jmp(&entry); __ jmp(&entry);
__ bind(&loop); __ bind(&loop);
...@@ -797,6 +795,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm, ...@@ -797,6 +795,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
__ movq(rax, rcx); __ movq(rax, rcx);
__ movq(rbx, r8); __ movq(rbx, r8);
#endif // _WIN64 #endif // _WIN64
// Set up the roots register.
ExternalReference roots_address = ExternalReference::roots_address();
__ movq(r13, roots_address);
// Current stack contents: // Current stack contents:
// [rsp + 2 * kPointerSize ... ]: Internal frame // [rsp + 2 * kPointerSize ... ]: Internal frame
// [rsp + kPointerSize] : function // [rsp + kPointerSize] : function
......
...@@ -71,8 +71,7 @@ void EntryNode::Compile(MacroAssembler* masm) { ...@@ -71,8 +71,7 @@ void EntryNode::Compile(MacroAssembler* masm) {
__ push(rdi); __ push(rdi);
int count = CfgGlobals::current()->fun()->scope()->num_stack_slots(); int count = CfgGlobals::current()->fun()->scope()->num_stack_slots();
if (count > 0) { if (count > 0) {
__ movq(kScratchRegister, Factory::undefined_value(), __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
RelocInfo::EMBEDDED_OBJECT);
for (int i = 0; i < count; i++) { for (int i = 0; i < count; i++) {
__ push(kScratchRegister); __ push(kScratchRegister);
} }
......
This diff is collapsed.
...@@ -339,7 +339,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { ...@@ -339,7 +339,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ bind(&fast); __ bind(&fast);
__ movq(rax, Operand(rcx, rax, times_pointer_size, __ movq(rax, Operand(rcx, rax, times_pointer_size,
FixedArray::kHeaderSize - kHeapObjectTag)); FixedArray::kHeaderSize - kHeapObjectTag));
__ Cmp(rax, Factory::the_hole_value()); __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
// In case the loaded value is the_hole we have to consult GetProperty // In case the loaded value is the_hole we have to consult GetProperty
// to ensure the prototype chain is searched. // to ensure the prototype chain is searched.
__ j(equal, &slow); __ j(equal, &slow);
...@@ -613,9 +613,9 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) { ...@@ -613,9 +613,9 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
// Check for boolean. // Check for boolean.
__ bind(&non_string); __ bind(&non_string);
__ Cmp(rdx, Factory::true_value()); __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
__ j(equal, &boolean); __ j(equal, &boolean);
__ Cmp(rdx, Factory::false_value()); __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
__ j(not_equal, &miss); __ j(not_equal, &miss);
__ bind(&boolean); __ bind(&boolean);
StubCompiler::GenerateLoadGlobalFunctionPrototype( StubCompiler::GenerateLoadGlobalFunctionPrototype(
......
...@@ -46,6 +46,22 @@ MacroAssembler::MacroAssembler(void* buffer, int size) ...@@ -46,6 +46,22 @@ MacroAssembler::MacroAssembler(void* buffer, int size)
} }
void MacroAssembler::LoadRoot(Register destination,
Heap::RootListIndex index) {
movq(destination, Operand(r13, index << kPointerSizeLog2));
}
void MacroAssembler::PushRoot(Heap::RootListIndex index) {
push(Operand(r13, index << kPointerSizeLog2));
}
void MacroAssembler::CompareRoot(Register with,
Heap::RootListIndex index) {
cmpq(with, Operand(r13, index << kPointerSizeLog2));
}
static void RecordWriteHelper(MacroAssembler* masm, static void RecordWriteHelper(MacroAssembler* masm,
Register object, Register object,
...@@ -276,7 +292,7 @@ void MacroAssembler::IllegalOperation(int num_arguments) { ...@@ -276,7 +292,7 @@ void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) { if (num_arguments > 0) {
addq(rsp, Immediate(num_arguments * kPointerSize)); addq(rsp, Immediate(num_arguments * kPointerSize));
} }
movq(rax, Factory::undefined_value(), RelocInfo::EMBEDDED_OBJECT); LoadRoot(rax, Heap::kUndefinedValueRootIndex);
} }
...@@ -628,7 +644,7 @@ void MacroAssembler::TryGetFunctionPrototype(Register function, ...@@ -628,7 +644,7 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
// If the prototype or initial map is the hole, don't return it and // If the prototype or initial map is the hole, don't return it and
// simply miss the cache instead. This will allow us to allocate a // simply miss the cache instead. This will allow us to allocate a
// prototype object on-demand in the runtime system. // prototype object on-demand in the runtime system.
Cmp(result, Factory::the_hole_value()); CompareRoot(result, Heap::kTheHoleValueRootIndex);
j(equal, miss); j(equal, miss);
// If the function does not have an initial map, we're done. // If the function does not have an initial map, we're done.
...@@ -1182,12 +1198,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg, ...@@ -1182,12 +1198,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Preserve original value of holder_reg. // Preserve original value of holder_reg.
push(holder_reg); push(holder_reg);
movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset)); movq(holder_reg, FieldOperand(holder_reg, JSGlobalProxy::kContextOffset));
Cmp(holder_reg, Factory::null_value()); CompareRoot(holder_reg, Heap::kNullValueRootIndex);
Check(not_equal, "JSGlobalProxy::context() should not be null."); Check(not_equal, "JSGlobalProxy::context() should not be null.");
// Read the first word and compare to global_context_map(), // Read the first word and compare to global_context_map(),
movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset)); movq(holder_reg, FieldOperand(holder_reg, HeapObject::kMapOffset));
Cmp(holder_reg, Factory::global_context_map()); CompareRoot(holder_reg, Heap::kGlobalContextMapRootIndex);
Check(equal, "JSGlobalObject::global_context should be a global context."); Check(equal, "JSGlobalObject::global_context should be a global context.");
pop(holder_reg); pop(holder_reg);
} }
......
...@@ -66,6 +66,10 @@ class MacroAssembler: public Assembler { ...@@ -66,6 +66,10 @@ class MacroAssembler: public Assembler {
public: public:
MacroAssembler(void* buffer, int size); MacroAssembler(void* buffer, int size);
void LoadRoot(Register destination, Heap::RootListIndex index);
void CompareRoot(Register with, Heap::RootListIndex index);
void PushRoot(Heap::RootListIndex index);
// --------------------------------------------------------------------------- // ---------------------------------------------------------------------------
// GC Support // GC Support
......
...@@ -51,18 +51,18 @@ int RegisterAllocator::ToNumber(Register reg) { ...@@ -51,18 +51,18 @@ int RegisterAllocator::ToNumber(Register reg) {
2, // rcx 2, // rcx
3, // rdx 3, // rdx
1, // rbx 1, // rbx
-1, // rsp -1, // rsp Stack pointer.
-1, // rbp -1, // rbp Frame pointer.
-1, // rsi -1, // rsi Context.
4, // rdi 4, // rdi
5, // r8 5, // r8
6, // r9 6, // r9
-1, // r10 -1, // r10 Scratch register.
7, // r11 9, // r11
11, // r12 10, // r12
10, // r13 -1, // r13 Roots array. This is callee saved.
8, // r14 7, // r14
9 // r15 8 // r15
}; };
return kNumbers[reg.code()]; return kNumbers[reg.code()];
} }
...@@ -71,7 +71,7 @@ int RegisterAllocator::ToNumber(Register reg) { ...@@ -71,7 +71,7 @@ int RegisterAllocator::ToNumber(Register reg) {
Register RegisterAllocator::ToRegister(int num) { Register RegisterAllocator::ToRegister(int num) {
ASSERT(num >= 0 && num < kNumRegisters); ASSERT(num >= 0 && num < kNumRegisters);
const Register kRegisters[] = const Register kRegisters[] =
{ rax, rbx, rcx, rdx, rdi, r8, r9, r11, r14, r15, r13, r12 }; { rax, rbx, rcx, rdx, rdi, r8, r9, r14, r15, r11, r12 };
return kRegisters[num]; return kRegisters[num];
} }
......
...@@ -33,9 +33,7 @@ namespace internal { ...@@ -33,9 +33,7 @@ namespace internal {
class RegisterAllocatorConstants : public AllStatic { class RegisterAllocatorConstants : public AllStatic {
public: public:
// Register allocation is not yet implemented on x64, but C++ static const int kNumRegisters = 11;
// forbids 0-length arrays so we use 1 as the number of registers.
static const int kNumRegisters = 12;
static const int kInvalidRegister = -1; static const int kInvalidRegister = -1;
}; };
......
...@@ -434,7 +434,7 @@ class LoadInterceptorCompiler BASE_EMBEDDED { ...@@ -434,7 +434,7 @@ class LoadInterceptorCompiler BASE_EMBEDDED {
holder_obj); holder_obj);
Label interceptor_failed; Label interceptor_failed;
__ Cmp(rax, Factory::no_interceptor_result_sentinel()); __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
__ j(equal, &interceptor_failed); __ j(equal, &interceptor_failed);
__ LeaveInternalFrame(); __ LeaveInternalFrame();
__ ret(0); __ ret(0);
...@@ -612,7 +612,7 @@ class CallInterceptorCompiler BASE_EMBEDDED { ...@@ -612,7 +612,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
__ pop(receiver); // restore holder __ pop(receiver); // restore holder
__ LeaveInternalFrame(); __ LeaveInternalFrame();
__ Cmp(rax, Factory::no_interceptor_result_sentinel()); __ CompareRoot(rax, Heap::kNoInterceptorResultSentinelRootIndex);
Label invoke; Label invoke;
__ j(not_equal, &invoke); __ j(not_equal, &invoke);
...@@ -755,9 +755,9 @@ Object* CallStubCompiler::CompileCallConstant(Object* object, ...@@ -755,9 +755,9 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
case BOOLEAN_CHECK: { case BOOLEAN_CHECK: {
Label fast; Label fast;
// Check that the object is a boolean. // Check that the object is a boolean.
__ Cmp(rdx, Factory::true_value()); __ CompareRoot(rdx, Heap::kTrueValueRootIndex);
__ j(equal, &fast); __ j(equal, &fast);
__ Cmp(rdx, Factory::false_value()); __ CompareRoot(rdx, Heap::kFalseValueRootIndex);
__ j(not_equal, &miss); __ j(not_equal, &miss);
__ bind(&fast); __ bind(&fast);
// Check that the maps starting from the prototype haven't changed. // Check that the maps starting from the prototype haven't changed.
...@@ -1125,10 +1125,10 @@ Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object, ...@@ -1125,10 +1125,10 @@ Object* LoadStubCompiler::CompileLoadGlobal(JSObject* object,
// Check for deleted property if property can actually be deleted. // Check for deleted property if property can actually be deleted.
if (!is_dont_delete) { if (!is_dont_delete) {
__ Cmp(rax, Factory::the_hole_value()); __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
__ j(equal, &miss); __ j(equal, &miss);
} else if (FLAG_debug_code) { } else if (FLAG_debug_code) {
__ Cmp(rax, Factory::the_hole_value()); __ CompareRoot(rax, Heap::kTheHoleValueRootIndex);
__ Check(not_equal, "DontDelete cells can't contain the hole"); __ Check(not_equal, "DontDelete cells can't contain the hole");
} }
......
...@@ -205,6 +205,14 @@ void VirtualFrame::EmitPush(Handle<Object> value) { ...@@ -205,6 +205,14 @@ void VirtualFrame::EmitPush(Handle<Object> value) {
} }
void VirtualFrame::EmitPush(Heap::RootListIndex index) {
ASSERT(stack_pointer_ == element_count() - 1);
elements_.Add(FrameElement::MemoryElement());
stack_pointer_++;
__ PushRoot(index);
}
void VirtualFrame::Drop(int count) { void VirtualFrame::Drop(int count) {
ASSERT(count >= 0); ASSERT(count >= 0);
ASSERT(height() >= count); ASSERT(height() >= count);
......
...@@ -375,6 +375,7 @@ class VirtualFrame : public ZoneObject { ...@@ -375,6 +375,7 @@ class VirtualFrame : public ZoneObject {
// corresponding push instruction. // corresponding push instruction.
void EmitPush(Register reg); void EmitPush(Register reg);
void EmitPush(const Operand& operand); void EmitPush(const Operand& operand);
void EmitPush(Heap::RootListIndex index);
void EmitPush(Immediate immediate); void EmitPush(Immediate immediate);
// Uses kScratchRegister, emits appropriate relocation info. // Uses kScratchRegister, emits appropriate relocation info.
void EmitPush(Handle<Object> value); void EmitPush(Handle<Object> value);
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment