Commit 74ef7530 authored by vegorov@chromium.org

Change inlined cache of instanceof stub to use indirection through cell.

The stub was directly patching the caller's code without issuing a write barrier, which violated incremental-marking invariants.

R=mstarzinger@chromium.org
BUG=http://crbug.com/109448
TEST=cctest/test-heap/InstanceOfStubWriteBarrier

Review URL: http://codereview.chromium.org/9158015

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@10380 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 62168ff8
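
Before the per-architecture diffs, here is a toy model of the invariant at stake; a hedged sketch assuming only what the commit message states, with no relation to V8's actual heap classes. Incremental marking colors objects white, grey, or black, and a black (fully scanned) object must never end up pointing at a white (unvisited) one unless a write barrier re-greys the target. Patching a map directly into already-black code skips that barrier; routing the store through a JSGlobalPropertyCell puts it back on a path the collector tracks.

    // Toy model (illustrative only): why an unbarriered store into a
    // black object can lose an otherwise live object.
    #include <cstdio>
    #include <vector>

    enum Color { WHITE, GREY, BLACK };

    struct Obj {
      Color color = WHITE;
      std::vector<Obj*> fields;
    };

    // Write barrier: if a black host gains a pointer to a white value,
    // re-grey the value so the marker will still visit it.
    void WriteBarrier(Obj* host, Obj* value, std::vector<Obj*>* worklist) {
      if (host->color == BLACK && value->color == WHITE) {
        value->color = GREY;
        worklist->push_back(value);
      }
    }

    int main() {
      Obj code;  // stands in for an optimized code object
      Obj map;   // stands in for a freshly created map
      std::vector<Obj*> worklist;

      code.color = BLACK;  // the marker has already finished with `code`

      // Old scheme: the stub patches the pointer straight into the code
      // object and never tells the marker; `map` stays white and would
      // be swept even though it is now reachable.
      code.fields.push_back(&map);
      std::printf("without barrier: map is %s\n",
                  map.color == WHITE ? "white (would be freed)" : "marked");

      // Fixed scheme: the store is accompanied by a barrier; in the CL
      // this falls out of writing into a property cell the collector
      // already tracks instead of into the code object itself.
      WriteBarrier(&code, &map, &worklist);
      std::printf("with barrier:    map is %s\n",
                  map.color == GREY ? "grey (will be marked)" : "white");
      return 0;
    }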
src/arm/code-stubs-arm.cc
@@ -4081,7 +4081,7 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   const Register inline_site = r9;
   const Register scratch = r2;
-  const int32_t kDeltaToLoadBoolResult = 3 * kPointerSize;
+  const int32_t kDeltaToLoadBoolResult = 4 * kPointerSize;
   Label slow, loop, is_instance, is_not_instance, not_js_object;
@@ -4132,7 +4132,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     __ sub(inline_site, lr, scratch);
     // Get the map location in scratch and patch it.
     __ GetRelocatedValueLocation(inline_site, scratch);
-    __ str(map, MemOperand(scratch));
+    __ ldr(scratch, MemOperand(scratch));
+    __ str(map, FieldMemOperand(scratch, JSGlobalPropertyCell::kValueOffset));
   }

   // Register mapping: r3 is object map and r4 is function prototype.
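Two details of the ARM stub change worth noting: GetRelocatedValueLocation still returns the address of the relocated constant in the caller's code, but that constant is now the property cell's address rather than the map slot itself, hence the extra ldr before the str into the cell's value field (FieldMemOperand folds in the heap-object tag). And kDeltaToLoadBoolResult grows from 3 to 4 words because the inlined sequence emitted by the Lithium change below gains one load instruction, pushing the patched bool-result site one word further from the return address.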
src/arm/lithium-codegen-arm.cc
@@ -2143,7 +2143,10 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
   // We use Factory::the_hole_value() on purpose instead of loading from the
   // root array to force relocation to be able to later patch with
   // the cached map.
-  __ mov(ip, Operand(factory()->the_hole_value()));
+  Handle<JSGlobalPropertyCell> cell =
+      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+  __ mov(ip, Operand(Handle<Object>(cell)));
+  __ ldr(ip, FieldMemOperand(ip, JSGlobalPropertyCell::kValueOffset));
   __ cmp(map, Operand(ip));
   __ b(ne, &cache_miss);
   // We use Factory::the_hole_value() on purpose instead of loading from the
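Even with the cell, the code comment above still applies: the cell is created holding the_hole_value, so the first inlined comparison is guaranteed to miss (no map can equal the hole), and embedding the cell handle keeps a relocation entry through which InstanceofStub can locate the cache from the call site.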
src/ia32/code-stubs-ia32.cc
@@ -5081,8 +5081,8 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   static const int kDeltaToCmpImmediate = 2;
   static const int kDeltaToMov = 8;
   static const int kDeltaToMovImmediate = 9;
-  static const int8_t kCmpEdiImmediateByte1 = BitCast<int8_t, uint8_t>(0x81);
-  static const int8_t kCmpEdiImmediateByte2 = BitCast<int8_t, uint8_t>(0xff);
+  static const int8_t kCmpEdiOperandByte1 = BitCast<int8_t, uint8_t>(0x3b);
+  static const int8_t kCmpEdiOperandByte2 = BitCast<int8_t, uint8_t>(0x3d);
   static const int8_t kMovEaxImmediateByte = BitCast<int8_t, uint8_t>(0xb8);

   ExternalReference roots_array_start =
@@ -5147,12 +5147,13 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     __ mov(scratch, Operand(esp, 0 * kPointerSize));
     __ sub(scratch, Operand(esp, 1 * kPointerSize));
     if (FLAG_debug_code) {
-      __ cmpb(Operand(scratch, 0), kCmpEdiImmediateByte1);
+      __ cmpb(Operand(scratch, 0), kCmpEdiOperandByte1);
       __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 1)");
-      __ cmpb(Operand(scratch, 1), kCmpEdiImmediateByte2);
+      __ cmpb(Operand(scratch, 1), kCmpEdiOperandByte2);
       __ Assert(equal, "InstanceofStub unexpected call site cache (cmp 2)");
     }
-    __ mov(Operand(scratch, kDeltaToCmpImmediate), map);
+    __ mov(scratch, Operand(scratch, kDeltaToCmpImmediate));
+    __ mov(Operand(scratch, 0), map);
   }

   // Loop through the prototype chain of the object looking for the function
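On ia32 the renamed byte constants reflect a real opcode change at the patch site, not just a rename: 0x81 0xFF starts cmp edi, imm32 (opcode 81 /7, ModRM 0xFF selecting edi), where the imm32 used to be the map itself, while 0x3B 0x3D starts cmp edi, [disp32] (opcode 3B /r, ModRM 0x3D selecting edi and a 32-bit absolute operand), where the disp32 is now the cell's address. That is also why the patcher now reads the word at kDeltaToCmpImmediate (the cell address baked into the instruction) and writes the map at offset 0 inside the cell. A small self-check of that decoding; my reading of the Intel encoding, not part of the CL:

    // Verify the ModRM bytes used by the old and new ia32 cmp sites.
    // ModRM = mod << 6 | reg << 3 | rm.
    #include <cassert>
    #include <cstdint>

    constexpr uint8_t ModRM(uint8_t mod, uint8_t reg, uint8_t rm) {
      return static_cast<uint8_t>(mod << 6 | reg << 3 | rm);
    }

    int main() {
      const uint8_t kEdi = 7;     // register number of edi
      const uint8_t kCmpExt = 7;  // CMP is /7 in the 0x81 immediate group
      // Old site: 81 FF imm32  ==  cmp edi, imm32 (map embedded in code).
      assert(ModRM(3, kCmpExt, kEdi) == 0xFF);
      // New site: 3B 3D disp32 ==  cmp edi, [disp32] (map read via cell).
      assert(ModRM(0, kEdi, 5 /* rm=101: [disp32] */) == 0x3D);
      return 0;
    }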
src/ia32/lithium-codegen-ia32.cc
@@ -1975,7 +1975,9 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
   Register map = ToRegister(instr->TempAt(0));
   __ mov(map, FieldOperand(object, HeapObject::kMapOffset));
   __ bind(deferred->map_check());  // Label for calculating code patching.
-  __ cmp(map, factory()->the_hole_value());  // Patched to cached map.
+  Handle<JSGlobalPropertyCell> cache_cell =
+      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+  __ cmp(map, Operand::Cell(cache_cell));  // Patched to cached map.
   __ j(not_equal, &cache_miss, Label::kNear);
   __ mov(eax, factory()->the_hole_value());  // Patched to either true or false.
   __ jmp(&done);
src/incremental-marking.h
@@ -56,6 +56,7 @@ class IncrementalMarking {
   }

   bool should_hurry() { return should_hurry_; }
+  void set_should_hurry(bool val) { should_hurry_ = val; }

   inline bool IsStopped() { return state() == STOPPED; }
@@ -219,10 +220,6 @@ class IncrementalMarking {
   void UncommitMarkingDeque();

  private:
-  void set_should_hurry(bool val) {
-    should_hurry_ = val;
-  }
-
   int64_t SpaceLeftInOldSpace();

   void ResetStepCounters();
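The header change is a visibility move only: set_should_hurry() goes from the private section to the public one (reformatted onto one line) so that the new cctest below can tell the marker to finish up before the final garbage collection.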
src/x64/code-stubs-x64.cc
@@ -4156,12 +4156,14 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
     // Get return address and delta to inlined map check.
     __ movq(kScratchRegister, Operand(rsp, 0 * kPointerSize));
     __ subq(kScratchRegister, Operand(rsp, 1 * kPointerSize));
-    __ movq(Operand(kScratchRegister, kOffsetToMapCheckValue), rax);
     if (FLAG_debug_code) {
       __ movl(rdi, Immediate(kWordBeforeMapCheckValue));
       __ cmpl(Operand(kScratchRegister, kOffsetToMapCheckValue - 4), rdi);
       __ Assert(equal, "InstanceofStub unexpected call site cache (check).");
     }
+    __ movq(kScratchRegister,
+            Operand(kScratchRegister, kOffsetToMapCheckValue));
+    __ movq(Operand(kScratchRegister, 0), rax);
   }
   __ movq(rcx, FieldOperand(rax, Map::kPrototypeOffset));
src/x64/lithium-codegen-x64.cc
@@ -1901,9 +1901,10 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) {
   Register map = ToRegister(instr->TempAt(0));
   __ movq(map, FieldOperand(object, HeapObject::kMapOffset));
   __ bind(deferred->map_check());  // Label for calculating code patching.
-  __ movq(kScratchRegister, factory()->the_hole_value(),
-          RelocInfo::EMBEDDED_OBJECT);
-  __ cmpq(map, kScratchRegister);  // Patched to cached map.
+  Handle<JSGlobalPropertyCell> cache_cell =
+      factory()->NewJSGlobalPropertyCell(factory()->the_hole_value());
+  __ movq(kScratchRegister, cache_cell, RelocInfo::GLOBAL_PROPERTY_CELL);
+  __ cmpq(map, Operand(kScratchRegister, 0));
   __ j(not_equal, &cache_miss, Label::kNear);
   // Patched to load either true or false.
   __ LoadRoot(ToRegister(instr->result()), Heap::kTheHoleValueRootIndex);
test/cctest/test-heap.cc
@@ -1476,3 +1476,59 @@ TEST(LeakGlobalContextViaMapProto) {
   HEAP->CollectAllAvailableGarbage();
   CHECK_EQ(0, NumberOfGlobalObjects());
 }
+
+
+TEST(InstanceOfStubWriteBarrier) {
+  if (!i::FLAG_crankshaft) return;
+  i::FLAG_allow_natives_syntax = true;
+  i::FLAG_verify_heap = true;
+  InitializeVM();
+  v8::HandleScope outer_scope;
+
+  {
+    v8::HandleScope scope;
+    CompileRun(
+        "function foo () { }"
+        "function mkbar () { return new (new Function(\"\")) (); }"
+        "function f (x) { return (x instanceof foo); }"
+        "function g () { f(mkbar()); }"
+        "f(new foo()); f(new foo());"
+        "%OptimizeFunctionOnNextCall(f);"
+        "f(new foo()); g();");
+  }
+
+  IncrementalMarking* marking = HEAP->incremental_marking();
+  marking->Abort();
+  marking->Start();
+
+  Handle<JSFunction> f =
+      v8::Utils::OpenHandle(
+          *v8::Handle<v8::Function>::Cast(
+              v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
+
+  CHECK(f->IsOptimized());
+
+  while (!Marking::IsBlack(Marking::MarkBitFrom(f->code())) &&
+         !marking->IsStopped()) {
+    marking->Step(MB);
+  }
+
+  CHECK(marking->IsMarking());
+
+  // Discard any pending GC requests, otherwise we will get a GC when we
+  // enter the code below.
+  if (ISOLATE->stack_guard()->IsGCRequest()) {
+    ISOLATE->stack_guard()->Continue(GC_REQUEST);
+  }
+
+  {
+    v8::HandleScope scope;
+    v8::Handle<v8::Object> global = v8::Context::GetCurrent()->Global();
+    v8::Handle<v8::Function> g =
+        v8::Handle<v8::Function>::Cast(global->Get(v8_str("g")));
+    g->Call(global, 0, NULL);
+  }
+
+  HEAP->incremental_marking()->set_should_hurry(true);
+  HEAP->CollectGarbage(OLD_POINTER_SPACE);
+}
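
The regression test drives exactly the failure path: it optimizes f, steps incremental marking until f's code is black (or marking stops), then calls g, which passes f an object with a brand-new map and forces InstanceofStub to patch its inline cache mid-marking. With FLAG_verify_heap set, the closing CollectGarbage(OLD_POINTER_SPACE) would be expected to catch a black code object referencing an unmarked map under the old direct-patching scheme; with the cell indirection the heap verifies cleanly.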