Commit 12d333b7 authored by Toon Verwaest, Committed by Commit Bot

[macro-assembler] Delete unused RecordWriteForMap

Bug: 
Change-Id: Ifde89f90fe18a0747f4b7b9511fbdc64df31555b
Reviewed-on: https://chromium-review.googlesource.com/707063
Commit-Queue: Toon Verwaest <verwaest@chromium.org>
Reviewed-by: Hannes Payer <hpayer@chromium.org>
Cr-Commit-Position: refs/heads/master@{#48384}
parent c34a2954
......@@ -613,81 +613,6 @@ void TurboAssembler::CallRecordWriteStub(
RestoreRegisters(registers);
}
// Will clobber 3 registers: object, map and dst. The register 'object' contains
// a heap object pointer. A scratch register also needs to be available.
void MacroAssembler::RecordWriteForMap(Register object,
Register map,
Register dst,
LinkRegisterStatus lr_status,
SaveFPRegsMode fp_mode) {
if (emit_debug_code()) {
ldr(dst, FieldMemOperand(map, HeapObject::kMapOffset));
cmp(dst, Operand(isolate()->factory()->meta_map()));
Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
if (!FLAG_incremental_marking) {
return;
}
if (emit_debug_code()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
ldr(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
cmp(scratch, map);
Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
Label done;
// A single check of the map's pages interesting flag suffices, since it is
// only set during incremental collection, and then it's also guaranteed that
// the from object's page's interesting flag is also set. This optimization
// relies on the fact that maps can never be in new space.
CheckPageFlag(map,
map, // Used as scratch.
MemoryChunk::kPointersToHereAreInterestingMask,
eq,
&done);
add(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
if (emit_debug_code()) {
Label ok;
tst(dst, Operand(kPointerSize - 1));
b(eq, &ok);
stop("Unaligned cell in write barrier");
bind(&ok);
}
// Record the actual write.
if (lr_status == kLRHasNotBeenSaved) {
push(lr);
}
RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
fp_mode);
CallStub(&stub);
if (lr_status == kLRHasNotBeenSaved) {
pop(lr);
}
bind(&done);
// Count number of write barriers in generated code.
isolate()->counters()->write_barriers_static()->Increment();
{
UseScratchRegisterScope temps(this);
IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1,
temps.Acquire(), dst);
}
// Clobber clobbered registers when running with the debug-code flag
// turned on to provoke errors.
if (emit_debug_code()) {
mov(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
mov(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
}
}
// Will clobber 3 registers: object, address, and value. The register 'object'
// contains a heap object pointer. The heap object tag is shifted away.
// A scratch register also needs to be available.
......
......@@ -680,9 +680,6 @@ class MacroAssembler : public TurboAssembler {
pointers_to_here_check_for_value);
}
void RecordWriteForMap(Register object, Register map, Register dst,
LinkRegisterStatus lr_status, SaveFPRegsMode save_fp);
// For a given |object| notify the garbage collector that the slot |address|
// has been written. |value| is the object being stored. The value and
// address registers are clobbered by the operation.
......
......@@ -3061,78 +3061,6 @@ void TurboAssembler::CallRecordWriteStub(
RestoreRegisters(registers);
}
// Will clobber: object, map, dst.
// If lr_status is kLRHasBeenSaved, lr will also be clobbered.
void MacroAssembler::RecordWriteForMap(Register object,
Register map,
Register dst,
LinkRegisterStatus lr_status,
SaveFPRegsMode fp_mode) {
ASM_LOCATION_IN_ASSEMBLER("MacroAssembler::RecordWrite");
DCHECK(!AreAliased(object, map));
if (emit_debug_code()) {
UseScratchRegisterScope temps(this);
Register temp = temps.AcquireX();
CompareObjectMap(map, temp, isolate()->factory()->meta_map());
Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
if (!FLAG_incremental_marking) {
return;
}
if (emit_debug_code()) {
UseScratchRegisterScope temps(this);
Register temp = temps.AcquireX();
Ldr(temp, FieldMemOperand(object, HeapObject::kMapOffset));
Cmp(temp, map);
Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
// First, check if a write barrier is even needed. The tests below
// catch stores of smis and stores into the young generation.
Label done;
// A single check of the map's pages interesting flag suffices, since it is
// only set during incremental collection, and then it's also guaranteed that
// the from object's page's interesting flag is also set. This optimization
// relies on the fact that maps can never be in new space.
CheckPageFlagClear(map,
map, // Used as scratch.
MemoryChunk::kPointersToHereAreInterestingMask,
&done);
// Record the actual write.
if (lr_status == kLRHasNotBeenSaved) {
Push(padreg, lr);
}
Add(dst, object, HeapObject::kMapOffset - kHeapObjectTag);
RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
fp_mode);
CallStub(&stub);
if (lr_status == kLRHasNotBeenSaved) {
Pop(lr, padreg);
}
Bind(&done);
// Count number of write barriers in generated code.
isolate()->counters()->write_barriers_static()->Increment();
IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, map,
dst);
// Clobber clobbered registers when running with the debug-code flag
// turned on to provoke errors.
if (emit_debug_code()) {
Mov(dst, Operand(bit_cast<int64_t>(kZapValue + 12)));
Mov(map, Operand(bit_cast<int64_t>(kZapValue + 16)));
}
}
// Will clobber: object, address, value.
// If lr_status is kLRHasBeenSaved, lr will also be clobbered.
//
......
......@@ -2140,13 +2140,6 @@ class MacroAssembler : public TurboAssembler {
pointers_to_here_check_for_value);
}
void RecordWriteForMap(
Register object,
Register map,
Register dst,
LinkRegisterStatus lr_status,
SaveFPRegsMode save_fp);
// For a given |object| notify the garbage collector that the slot |address|
// has been written. |value| is the object being stored. The value and
// address registers are clobbered by the operation.
......
......@@ -376,67 +376,6 @@ void TurboAssembler::CallRecordWriteStub(
RestoreRegisters(registers);
}
void MacroAssembler::RecordWriteForMap(
Register object,
Handle<Map> map,
Register scratch1,
Register scratch2,
SaveFPRegsMode save_fp) {
Label done;
Register address = scratch1;
Register value = scratch2;
if (emit_debug_code()) {
Label ok;
lea(address, FieldOperand(object, HeapObject::kMapOffset));
test_b(address, Immediate((1 << kPointerSizeLog2) - 1));
j(zero, &ok, Label::kNear);
int3();
bind(&ok);
}
DCHECK(object != value);
DCHECK(object != address);
DCHECK(value != address);
AssertNotSmi(object);
if (!FLAG_incremental_marking) {
return;
}
// Compute the address.
lea(address, FieldOperand(object, HeapObject::kMapOffset));
// A single check of the map's pages interesting flag suffices, since it is
// only set during incremental collection, and then it's also guaranteed that
// the from object's page's interesting flag is also set. This optimization
// relies on the fact that maps can never be in new space.
DCHECK(!isolate()->heap()->InNewSpace(*map));
CheckPageFlagForMap(map,
MemoryChunk::kPointersToHereAreInterestingMask,
zero,
&done,
Label::kNear);
RecordWriteStub stub(isolate(), object, value, address, OMIT_REMEMBERED_SET,
save_fp);
CallStub(&stub);
bind(&done);
// Count number of write barriers in generated code.
isolate()->counters()->write_barriers_static()->Increment();
IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
// Clobber clobbered input registers when running with the debug-code flag
// turned on to provoke errors.
if (emit_debug_code()) {
mov(value, Immediate(bit_cast<int32_t>(kZapValue)));
mov(scratch1, Immediate(bit_cast<int32_t>(kZapValue)));
mov(scratch2, Immediate(bit_cast<int32_t>(kZapValue)));
}
}
void MacroAssembler::RecordWrite(
Register object,
......@@ -1840,30 +1779,6 @@ void TurboAssembler::CheckPageFlag(Register object, Register scratch, int mask,
j(cc, condition_met, condition_met_distance);
}
void MacroAssembler::CheckPageFlagForMap(
Handle<Map> map,
int mask,
Condition cc,
Label* condition_met,
Label::Distance condition_met_distance) {
DCHECK(cc == zero || cc == not_zero);
Page* page = Page::FromAddress(map->address());
DCHECK(!serializer_enabled()); // Serializer cannot match page_flags.
ExternalReference reference(ExternalReference::page_flags(page));
// The inlined static address check of the page's flags relies
// on maps never being compacted.
DCHECK(!isolate()->heap()->mark_compact_collector()->
IsOnEvacuationCandidate(*map));
if (mask < (1 << kBitsPerByte)) {
test_b(Operand::StaticVariable(reference), Immediate(mask));
} else {
test(Operand::StaticVariable(reference), Immediate(mask));
}
j(cc, condition_met, condition_met_distance);
}
void MacroAssembler::JumpIfBlack(Register object,
Register scratch0,
Register scratch1,
......
......@@ -392,10 +392,6 @@ class MacroAssembler : public TurboAssembler {
SaveFPRegsMode save_fp,
RememberedSetFinalAction and_then);
void CheckPageFlagForMap(
Handle<Map> map, int mask, Condition cc, Label* condition_met,
Label::Distance condition_met_distance = Label::kFar);
// Check if object is in new space. Jumps if the object is not in new space.
// The register scratch can be object itself, but scratch will be clobbered.
void JumpIfNotInNewSpace(Register object, Register scratch, Label* branch,
......@@ -476,12 +472,6 @@ class MacroAssembler : public TurboAssembler {
PointersToHereCheck pointers_to_here_check_for_value =
kPointersToHereMaybeInteresting);
// For page containing |object| mark the region covering the object's map
// dirty. |object| is the object being stored into, |map| is the Map object
// that was stored.
void RecordWriteForMap(Register object, Handle<Map> map, Register scratch1,
Register scratch2, SaveFPRegsMode save_fp);
// Frame restart support
void MaybeDropFrames();
......
......@@ -300,86 +300,6 @@ void TurboAssembler::CallRecordWriteStub(
RestoreRegisters(registers);
}
// Clobbers object, dst, map, and ra, if (ra_status == kRAHasBeenSaved)
void MacroAssembler::RecordWriteForMap(Register object,
Register map,
Register dst,
RAStatus ra_status,
SaveFPRegsMode fp_mode) {
if (emit_debug_code()) {
DCHECK(dst != at);
lw(dst, FieldMemOperand(map, HeapObject::kMapOffset));
Check(eq,
kWrongAddressOrValuePassedToRecordWrite,
dst,
Operand(isolate()->factory()->meta_map()));
}
if (!FLAG_incremental_marking) {
return;
}
if (emit_debug_code()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
lw(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
Check(eq, kWrongAddressOrValuePassedToRecordWrite, map, Operand(scratch));
}
Label done;
// A single check of the map's pages interesting flag suffices, since it is
// only set during incremental collection, and then it's also guaranteed that
// the from object's page's interesting flag is also set. This optimization
// relies on the fact that maps can never be in new space.
CheckPageFlag(map,
map, // Used as scratch.
MemoryChunk::kPointersToHereAreInterestingMask,
eq,
&done);
Addu(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
if (emit_debug_code()) {
Label ok;
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
And(scratch, dst, Operand(kPointerSize - 1));
Branch(&ok, eq, scratch, Operand(zero_reg));
stop("Unaligned cell in write barrier");
bind(&ok);
}
// Record the actual write.
if (ra_status == kRAHasNotBeenSaved) {
push(ra);
}
RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
fp_mode);
CallStub(&stub);
if (ra_status == kRAHasNotBeenSaved) {
pop(ra);
}
bind(&done);
{
// Count number of write barriers in generated code.
isolate()->counters()->write_barriers_static()->Increment();
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1,
scratch, dst);
}
// Clobber clobbered registers when running with the debug-code flag
// turned on to provoke errors.
if (emit_debug_code()) {
li(dst, Operand(bit_cast<int32_t>(kZapValue + 12)));
li(map, Operand(bit_cast<int32_t>(kZapValue + 16)));
}
}
// Clobbers object, address, value, and ra, if (ra_status == kRAHasBeenSaved)
// The register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
......
......@@ -1022,9 +1022,6 @@ class MacroAssembler : public TurboAssembler {
pointers_to_here_check_for_value);
}
void RecordWriteForMap(Register object, Register map, Register dst,
RAStatus ra_status, SaveFPRegsMode save_fp);
// For a given |object| notify the garbage collector that the slot |address|
// has been written. |value| is the object being stored. The value and
// address registers are clobbered by the operation.
......
......@@ -300,86 +300,6 @@ void TurboAssembler::CallRecordWriteStub(
RestoreRegisters(registers);
}
// Clobbers object, dst, map, and ra, if (ra_status == kRAHasBeenSaved)
void MacroAssembler::RecordWriteForMap(Register object,
Register map,
Register dst,
RAStatus ra_status,
SaveFPRegsMode fp_mode) {
if (emit_debug_code()) {
DCHECK(dst != at);
Ld(dst, FieldMemOperand(map, HeapObject::kMapOffset));
Check(eq,
kWrongAddressOrValuePassedToRecordWrite,
dst,
Operand(isolate()->factory()->meta_map()));
}
if (!FLAG_incremental_marking) {
return;
}
if (emit_debug_code()) {
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
Ld(scratch, FieldMemOperand(object, HeapObject::kMapOffset));
Check(eq, kWrongAddressOrValuePassedToRecordWrite, map, Operand(scratch));
}
Label done;
// A single check of the map's pages interesting flag suffices, since it is
// only set during incremental collection, and then it's also guaranteed that
// the from object's page's interesting flag is also set. This optimization
// relies on the fact that maps can never be in new space.
CheckPageFlag(map,
map, // Used as scratch.
MemoryChunk::kPointersToHereAreInterestingMask,
eq,
&done);
Daddu(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
if (emit_debug_code()) {
Label ok;
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
And(scratch, dst, Operand(kPointerSize - 1));
Branch(&ok, eq, scratch, Operand(zero_reg));
stop("Unaligned cell in write barrier");
bind(&ok);
}
// Record the actual write.
if (ra_status == kRAHasNotBeenSaved) {
push(ra);
}
RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
fp_mode);
CallStub(&stub);
if (ra_status == kRAHasNotBeenSaved) {
pop(ra);
}
bind(&done);
{
// Count number of write barriers in generated code.
UseScratchRegisterScope temps(this);
Register scratch = temps.Acquire();
isolate()->counters()->write_barriers_static()->Increment();
IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1,
scratch, dst);
}
// Clobber clobbered registers when running with the debug-code flag
// turned on to provoke errors.
if (emit_debug_code()) {
li(dst, Operand(bit_cast<int64_t>(kZapValue + 12)));
li(map, Operand(bit_cast<int64_t>(kZapValue + 16)));
}
}
// Clobbers object, address, value, and ra, if (ra_status == kRAHasBeenSaved)
// The register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
......
......@@ -1083,13 +1083,6 @@ class MacroAssembler : public TurboAssembler {
pointers_to_here_check_for_value);
}
void RecordWriteForMap(
Register object,
Register map,
Register dst,
RAStatus ra_status,
SaveFPRegsMode save_fp);
// For a given |object| notify the garbage collector that the slot |address|
// has been written. |value| is the object being stored. The value and
// address registers are clobbered by the operation.
......
......@@ -439,75 +439,6 @@ void TurboAssembler::CallRecordWriteStub(
RestoreRegisters(registers);
}
// Will clobber 4 registers: object, map, dst, ip. The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object, Register map,
Register dst,
LinkRegisterStatus lr_status,
SaveFPRegsMode fp_mode) {
if (emit_debug_code()) {
LoadP(dst, FieldMemOperand(map, HeapObject::kMapOffset));
Cmpi(dst, Operand(isolate()->factory()->meta_map()), r0);
Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
if (!FLAG_incremental_marking) {
return;
}
if (emit_debug_code()) {
LoadP(ip, FieldMemOperand(object, HeapObject::kMapOffset));
cmp(ip, map);
Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
Label done;
// A single check of the map's pages interesting flag suffices, since it is
// only set during incremental collection, and then it's also guaranteed that
// the from object's page's interesting flag is also set. This optimization
// relies on the fact that maps can never be in new space.
CheckPageFlag(map,
map, // Used as scratch.
MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
addi(dst, object, Operand(HeapObject::kMapOffset - kHeapObjectTag));
if (emit_debug_code()) {
Label ok;
andi(r0, dst, Operand(kPointerSize - 1));
beq(&ok, cr0);
stop("Unaligned cell in write barrier");
bind(&ok);
}
// Record the actual write.
if (lr_status == kLRHasNotBeenSaved) {
mflr(r0);
push(r0);
}
RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
fp_mode);
CallStub(&stub);
if (lr_status == kLRHasNotBeenSaved) {
pop(r0);
mtlr(r0);
}
bind(&done);
// Count number of write barriers in generated code.
isolate()->counters()->write_barriers_static()->Increment();
IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);
// Clobber clobbered registers when running with the debug-code flag
// turned on to provoke errors.
if (emit_debug_code()) {
mov(dst, Operand(bit_cast<intptr_t>(kZapValue + 12)));
mov(map, Operand(bit_cast<intptr_t>(kZapValue + 16)));
}
}
// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
......
......@@ -733,9 +733,6 @@ class MacroAssembler : public TurboAssembler {
pointers_to_here_check_for_value);
}
void RecordWriteForMap(Register object, Register map, Register dst,
LinkRegisterStatus lr_status, SaveFPRegsMode save_fp);
// For a given |object| notify the garbage collector that the slot |address|
// has been written. |value| is the object being stored. The value and
// address registers are clobbered by the operation.
......
......@@ -437,71 +437,6 @@ void TurboAssembler::CallRecordWriteStub(
RestoreRegisters(registers);
}
// Will clobber 4 registers: object, map, dst, ip. The
// register 'object' contains a heap object pointer.
void MacroAssembler::RecordWriteForMap(Register object, Register map,
Register dst,
LinkRegisterStatus lr_status,
SaveFPRegsMode fp_mode) {
if (emit_debug_code()) {
LoadP(dst, FieldMemOperand(map, HeapObject::kMapOffset));
CmpP(dst, Operand(isolate()->factory()->meta_map()));
Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
if (!FLAG_incremental_marking) {
return;
}
if (emit_debug_code()) {
CmpP(map, FieldMemOperand(object, HeapObject::kMapOffset));
Check(eq, kWrongAddressOrValuePassedToRecordWrite);
}
Label done;
// A single check of the map's pages interesting flag suffices, since it is
// only set during incremental collection, and then it's also guaranteed that
// the from object's page's interesting flag is also set. This optimization
// relies on the fact that maps can never be in new space.
CheckPageFlag(map,
map, // Used as scratch.
MemoryChunk::kPointersToHereAreInterestingMask, eq, &done);
lay(dst, MemOperand(object, HeapObject::kMapOffset - kHeapObjectTag));
if (emit_debug_code()) {
Label ok;
AndP(r0, dst, Operand(kPointerSize - 1));
beq(&ok, Label::kNear);
stop("Unaligned cell in write barrier");
bind(&ok);
}
// Record the actual write.
if (lr_status == kLRHasNotBeenSaved) {
push(r14);
}
RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
fp_mode);
CallStub(&stub);
if (lr_status == kLRHasNotBeenSaved) {
pop(r14);
}
bind(&done);
// Count number of write barriers in generated code.
isolate()->counters()->write_barriers_static()->Increment();
IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1, ip, dst);
// Clobber clobbered registers when running with the debug-code flag
// turned on to provoke errors.
if (emit_debug_code()) {
mov(dst, Operand(bit_cast<intptr_t>(kZapValue + 12)));
mov(map, Operand(bit_cast<intptr_t>(kZapValue + 16)));
}
}
// Will clobber 4 registers: object, address, scratch, ip. The
// register 'object' contains a heap object pointer. The heap object
// tag is shifted away.
......
......@@ -1373,9 +1373,6 @@ class MacroAssembler : public TurboAssembler {
pointers_to_here_check_for_value);
}
void RecordWriteForMap(Register object, Register map, Register dst,
LinkRegisterStatus lr_status, SaveFPRegsMode save_fp);
// For a given |object| notify the garbage collector that the slot |address|
// has been written. |value| is the object being stored. The value and
// address registers are clobbered by the operation.
......
......@@ -338,77 +338,6 @@ void TurboAssembler::CallRecordWriteStub(
RestoreRegisters(registers);
}
void MacroAssembler::RecordWriteForMap(Register object,
Register map,
Register dst,
SaveFPRegsMode fp_mode) {
DCHECK(object != kScratchRegister);
DCHECK(object != map);
DCHECK(object != dst);
DCHECK(map != dst);
AssertNotSmi(object);
if (emit_debug_code()) {
Label ok;
if (map == kScratchRegister) pushq(map);
CompareMap(map, isolate()->factory()->meta_map());
if (map == kScratchRegister) popq(map);
j(equal, &ok, Label::kNear);
int3();
bind(&ok);
}
if (!FLAG_incremental_marking) {
return;
}
if (emit_debug_code()) {
Label ok;
if (map == kScratchRegister) pushq(map);
cmpp(map, FieldOperand(object, HeapObject::kMapOffset));
if (map == kScratchRegister) popq(map);
j(equal, &ok, Label::kNear);
int3();
bind(&ok);
}
// Compute the address.
leap(dst, FieldOperand(object, HeapObject::kMapOffset));
// First, check if a write barrier is even needed. The tests below
// catch stores of smis and stores into the young generation.
Label done;
// A single check of the map's pages interesting flag suffices, since it is
// only set during incremental collection, and then it's also guaranteed that
// the from object's page's interesting flag is also set. This optimization
// relies on the fact that maps can never be in new space.
CheckPageFlag(map,
map, // Used as scratch.
MemoryChunk::kPointersToHereAreInterestingMask,
zero,
&done,
Label::kNear);
RecordWriteStub stub(isolate(), object, map, dst, OMIT_REMEMBERED_SET,
fp_mode);
CallStub(&stub);
bind(&done);
// Count number of write barriers in generated code.
isolate()->counters()->write_barriers_static()->Increment();
IncrementCounter(isolate()->counters()->write_barriers_dynamic(), 1);
// Clobber clobbered registers when running with the debug-code flag
// turned on to provoke errors.
if (emit_debug_code()) {
Move(dst, kZapValue, Assembler::RelocInfoNone());
Move(map, kZapValue, Assembler::RelocInfoNone());
}
}
void MacroAssembler::RecordWrite(
Register object,
Register address,
......
......@@ -622,12 +622,6 @@ class MacroAssembler : public TurboAssembler {
pointers_to_here_check_for_value);
}
void RecordWriteForMap(
Register object,
Register map,
Register dst,
SaveFPRegsMode save_fp);
// For page containing |object| mark region covering |address|
// dirty. |object| is the object being stored into, |value| is the
// object being stored. The address and value registers are clobbered by the
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment