Commit 16e4c295 authored by danno@chromium.org

Optimize write barrier of map-only elements transitions

R=mstarzinger@chromium.org

Review URL: https://chromiumcodereview.appspot.com/10544005

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@11729 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 31a0d025
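
For readers skimming the diff below: a map-only ("simple") elements transition stores nothing but a new map pointer, and maps are never allocated in new space and never moved by compaction, so the generic RecordWriteField barrier can be replaced with a cheaper RecordWriteForMap that tests a single, statically addressed page-flag word and omits the remembered set. A minimal standalone C++ sketch of that fast path (hypothetical types, names, and constants; not V8 code):

#include <cstdint>
#include <iostream>

// Hypothetical stand-in for MemoryChunk::kPointersToHereAreInterestingMask;
// the real V8 constant and flag layout differ.
constexpr uint32_t kPointersToHereAreInterestingMask = 1u << 1;

struct PageModel {    // models the flag word living at a fixed page address
  uint32_t flags = 0;
};

struct ObjectModel {  // models a heap object with a map slot
  const void* map = nullptr;
};

// Sketch of the RecordWriteForMap idea: the map's page is known when the code
// is generated, so the barrier is one static flag test plus a rarely taken
// slow path, and no remembered-set entry is ever recorded.
void StoreMapWithBarrier(ObjectModel* object, const void* new_map,
                         const PageModel* map_page,
                         void (*incremental_marking_stub)(ObjectModel*)) {
  object->map = new_map;  // the actual map store
  if ((map_page->flags & kPointersToHereAreInterestingMask) == 0) {
    return;  // incremental marking is not scanning this page: nothing to do
  }
  incremental_marking_stub(object);  // only reached during incremental GC
}

int main() {
  PageModel map_page;  // flag clear, so the fast path is taken
  ObjectModel obj;
  static int fake_map = 0;
  StoreMapWithBarrier(&obj, &fake_map, &map_page,
                      [](ObjectModel*) { std::cout << "slow path\n"; });
  std::cout << "map stored, barrier skipped\n";
  return 0;
}
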
@@ -1133,6 +1133,12 @@ ExternalReference ExternalReference::math_log_double_function(
 }
 
 
+ExternalReference ExternalReference::page_flags(Page* page) {
+  return ExternalReference(reinterpret_cast<Address>(page) +
+                           MemoryChunk::kFlagsOffset);
+}
+
+
 // Helper function to compute x^y, where y is known to be an
 // integer. Uses binary decomposition to limit the number of
 // multiplications; see the discussion in "Hacker's Delight" by Henry
@@ -656,6 +656,8 @@ class ExternalReference BASE_EMBEDDED {
   static ExternalReference math_tan_double_function(Isolate* isolate);
   static ExternalReference math_log_double_function(Isolate* isolate);
 
+  static ExternalReference page_flags(Page* page);
+
   Address address() const {return reinterpret_cast<Address>(address_);}
 
 #ifdef ENABLE_DEBUGGER_SUPPORT
@@ -3573,18 +3573,25 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
   ElementsKind to_kind = to_map->elements_kind();
 
   Label not_applicable;
+  bool is_simple_map_transition =
+      IsSimpleMapChangeTransition(from_kind, to_kind);
+  Label::Distance branch_distance =
+      is_simple_map_transition ? Label::kNear : Label::kFar;
   __ cmp(FieldOperand(object_reg, HeapObject::kMapOffset), from_map);
-  __ j(not_equal, &not_applicable);
-  __ mov(new_map_reg, to_map);
-  if (IsSimpleMapChangeTransition(from_kind, to_kind)) {
+  __ j(not_equal, &not_applicable, branch_distance);
+  if (is_simple_map_transition) {
     Register object_reg = ToRegister(instr->object());
-    __ mov(FieldOperand(object_reg, HeapObject::kMapOffset), new_map_reg);
+    Handle<Map> map = instr->hydrogen()->transitioned_map();
+    __ mov(FieldOperand(object_reg, HeapObject::kMapOffset),
+           Immediate(map));
     // Write barrier.
     ASSERT_NE(instr->temp_reg(), NULL);
-    __ RecordWriteField(object_reg, HeapObject::kMapOffset, new_map_reg,
-                        ToRegister(instr->temp_reg()), kDontSaveFPRegs);
+    __ RecordWriteForMap(object_reg, to_map, new_map_reg,
+                         ToRegister(instr->temp_reg()),
+                         kDontSaveFPRegs);
   } else if (IsFastSmiElementsKind(from_kind) &&
              IsFastDoubleElementsKind(to_kind)) {
+    __ mov(new_map_reg, to_map);
     Register fixed_object_reg = ToRegister(instr->temp_reg());
     ASSERT(fixed_object_reg.is(edx));
     ASSERT(new_map_reg.is(ebx));
@@ -3593,6 +3600,7 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
                 RelocInfo::CODE_TARGET, instr);
   } else if (IsFastDoubleElementsKind(from_kind) &&
              IsFastObjectElementsKind(to_kind)) {
+    __ mov(new_map_reg, to_map);
     Register fixed_object_reg = ToRegister(instr->temp_reg());
     ASSERT(fixed_object_reg.is(edx));
     ASSERT(new_map_reg.is(ebx));
@@ -237,6 +237,70 @@ void MacroAssembler::RecordWriteField(
 }
 
 
+void MacroAssembler::RecordWriteForMap(
+    Register object,
+    Handle<Map> map,
+    Register scratch1,
+    Register scratch2,
+    SaveFPRegsMode save_fp) {
+  // First, check if a write barrier is even needed. The tests below
+  // catch stores of Smis.
+  Label done;
+
+  Register address = scratch1;
+  Register value = scratch2;
+  if (emit_debug_code()) {
+    Label ok;
+    lea(address, FieldOperand(object, HeapObject::kMapOffset));
+    test_b(address, (1 << kPointerSizeLog2) - 1);
+    j(zero, &ok, Label::kNear);
+    int3();
+    bind(&ok);
+  }
+
+  ASSERT(!object.is(value));
+  ASSERT(!object.is(address));
+  ASSERT(!value.is(address));
+  if (emit_debug_code()) {
+    AbortIfSmi(object);
+  }
+
+  if (!FLAG_incremental_marking) {
+    return;
+  }
+
+  // A single check of the map's pages interesting flag suffices, since it is
+  // only set during incremental collection, and then it's also guaranteed that
+  // the from object's page's interesting flag is also set. This optimization
+  // relies on the fact that maps can never be in new space.
+  ASSERT(!isolate()->heap()->InNewSpace(*map));
+  CheckPageFlagForMap(map,
+                      MemoryChunk::kPointersToHereAreInterestingMask,
+                      zero,
+                      &done,
+                      Label::kNear);
+
+  // Delay the initialization of |address| and |value| for the stub until it's
+  // known that they will be needed. Up until this point their values are not
+  // needed since they are embedded in the operands of instructions that need
+  // them.
+  lea(address, FieldOperand(object, HeapObject::kMapOffset));
+  mov(value, Immediate(map));
+  RecordWriteStub stub(object, value, address, OMIT_REMEMBERED_SET, save_fp);
+  CallStub(&stub);
+
+  bind(&done);
+
+  // Clobber clobbered input registers when running with the debug-code flag
+  // turned on to provoke errors.
+  if (emit_debug_code()) {
+    mov(value, Immediate(BitCast<int32_t>(kZapValue)));
+    mov(scratch1, Immediate(BitCast<int32_t>(kZapValue)));
+    mov(scratch2, Immediate(BitCast<int32_t>(kZapValue)));
+  }
+}
+
+
 void MacroAssembler::RecordWrite(Register object,
                                  Register address,
                                  Register value,
@@ -2618,6 +2682,28 @@ void MacroAssembler::CheckPageFlag(
 }
 
 
+void MacroAssembler::CheckPageFlagForMap(
+    Handle<Map> map,
+    int mask,
+    Condition cc,
+    Label* condition_met,
+    Label::Distance condition_met_distance) {
+  ASSERT(cc == zero || cc == not_zero);
+  Page* page = Page::FromAddress(map->address());
+  ExternalReference reference(ExternalReference::page_flags(page));
+  // The inlined static address check of the page's flags relies
+  // on maps never being compacted.
+  ASSERT(!isolate()->heap()->mark_compact_collector()->
+         IsOnEvacuationCandidate(*map));
+  if (mask < (1 << kBitsPerByte)) {
+    test_b(Operand::StaticVariable(reference), static_cast<uint8_t>(mask));
+  } else {
+    test(Operand::StaticVariable(reference), Immediate(mask));
+  }
+  j(cc, condition_met, condition_met_distance);
+}
+
+
 void MacroAssembler::JumpIfBlack(Register object,
                                  Register scratch0,
                                  Register scratch1,
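
One way to see why CheckPageFlagForMap can use a static operand: page flags live at a fixed offset inside each power-of-two-aligned page, and for a handle-allocated map the containing page, and therefore the flag word's absolute address, is already known when the test instruction is emitted, whereas a generic per-object check has to mask the object register at run time. A rough illustration with made-up constants (not V8's real page size or flags offset):

#include <cassert>
#include <cstdint>

// Made-up constants for illustration; V8's actual page size and flags offset differ.
constexpr uintptr_t kPageSize = 1u << 20;              // pages are power-of-two aligned
constexpr uintptr_t kFlagsOffset = 2 * sizeof(void*);  // flag word near the page start

// Run-time variant: derive the page base by masking the object's address,
// which is what a generic per-object page-flag check has to do.
uintptr_t FlagsAddressFromObject(uintptr_t object_address) {
  return (object_address & ~(kPageSize - 1)) + kFlagsOffset;
}

// Code-generation-time variant: the map's page is known up front, so the
// resulting address can be baked into the instruction as a static operand
// (the role played by ExternalReference::page_flags in the diff above).
uintptr_t FlagsAddressFromPage(uintptr_t page_address) {
  return page_address + kFlagsOffset;
}

int main() {
  uintptr_t page = 4 * kPageSize;         // some page-aligned address
  uintptr_t map_in_page = page + 0x1234;  // a map allocated inside that page
  assert(FlagsAddressFromObject(map_in_page) == FlagsAddressFromPage(page));
  return 0;
}
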
@@ -90,6 +90,13 @@ class MacroAssembler: public Assembler {
                      Label* condition_met,
                      Label::Distance condition_met_distance = Label::kFar);
 
+  void CheckPageFlagForMap(
+      Handle<Map> map,
+      int mask,
+      Condition cc,
+      Label* condition_met,
+      Label::Distance condition_met_distance = Label::kFar);
+
   // Check if object is in new space. Jumps if the object is not in new space.
   // The register scratch can be object itself, but scratch will be clobbered.
   void JumpIfNotInNewSpace(Register object,
@@ -194,6 +201,16 @@ class MacroAssembler: public Assembler {
       RememberedSetAction remembered_set_action = EMIT_REMEMBERED_SET,
       SmiCheck smi_check = INLINE_SMI_CHECK);
 
+  // For page containing |object| mark the region covering the object's map
+  // dirty. |object| is the object being stored into, |map| is the Map object
+  // that was stored.
+  void RecordWriteForMap(
+      Register object,
+      Handle<Map> map,
+      Register scratch1,
+      Register scratch2,
+      SaveFPRegsMode save_fp);
+
 #ifdef ENABLE_DEBUGGER_SUPPORT
   // ---------------------------------------------------------------------------
   // Debugger Support