Commit bb1abb7f authored by plind44@gmail.com

MIPS: CodeStubs contain their corresponding Isolate* now. (part 1)

Port r20919 (aa51355)

Original commit message:
This is a purely mechanical change, adding an Isolate* to the CodeStub
constructor and a corresponding field plus a getter. A few methods in
CodeStub and its subclasses can be simplified now, but this is done in
a separate CL.

The underlying reason, apart from simplicity, is that deep down in the
call chain we need to detect whether the serializer is active. This
information will be part of the Isolate, not a global variable with
funky synchronization primitives around it (which is fundamentally
wrong and the underlying cause of race conditions and a catch-22
during initialization).

BUG=359977
LOG=y
R=plind44@gmail.com

Review URL: https://codereview.chromium.org/252383005

Patch from Balazs Kilvady <kilvadyb@homejinni.com>.

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@20944 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
parent 50945297
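
For orientation before the diff: the original CL being ported (r20919) gives every CodeStub an Isolate* at construction time, and each MIPS call site below simply threads isolate() (or masm->isolate()) through as the new first constructor argument. The following is a minimal sketch of the shape of that base-class change, simplified and illustrative only, not the exact declarations from src/code-stubs.h; the serializer remark in the trailing comment reflects the stated motivation, not code contained in this CL.

    // Sketch only -- simplified; not the exact V8 declarations.
    class Isolate;  // V8's per-VM-instance object (the real type lives in src/isolate.h)

    class CodeStub {
     public:
      explicit CodeStub(Isolate* isolate) : isolate_(isolate) {}
      Isolate* isolate() const { return isolate_; }  // new getter used by stub code
      virtual ~CodeStub() {}

     private:
      Isolate* isolate_;  // new field: every stub now carries its isolate
    };

    class PlatformCodeStub : public CodeStub {
     public:
      explicit PlatformCodeStub(Isolate* isolate) : CodeStub(isolate) {}
    };

    // Call sites change mechanically, e.g.
    //   CEntryStub stub(1, save_doubles);             // before
    //   CEntryStub stub(isolate(), 1, save_doubles);  // after
    // so that code deep in the stub machinery can later ask the Isolate, rather
    // than a global flag, whether the serializer is active.
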
......@@ -824,7 +824,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
if (is_construct) {
// No type feedback cell is available
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
CallConstructStub stub(masm->isolate(), NO_CALL_FUNCTION_FLAGS);
__ CallStub(&stub);
} else {
ParameterCount actual(a0);
......
This diff is collapsed.
......@@ -40,8 +40,8 @@ void ArrayNativeCode(MacroAssembler* masm, Label* call_generic_code);
class StoreBufferOverflowStub: public PlatformCodeStub {
public:
explicit StoreBufferOverflowStub(SaveFPRegsMode save_fp)
: save_doubles_(save_fp) {}
StoreBufferOverflowStub(Isolate* isolate, SaveFPRegsMode save_fp)
: PlatformCodeStub(isolate), save_doubles_(save_fp) {}
void Generate(MacroAssembler* masm);
......@@ -93,7 +93,7 @@ class StringHelper : public AllStatic {
class SubStringStub: public PlatformCodeStub {
public:
SubStringStub() {}
explicit SubStringStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
private:
Major MajorKey() { return SubString; }
......@@ -104,8 +104,8 @@ class SubStringStub: public PlatformCodeStub {
class StoreRegistersStateStub: public PlatformCodeStub {
public:
explicit StoreRegistersStateStub(SaveFPRegsMode with_fp)
: save_doubles_(with_fp) {}
explicit StoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp)
: PlatformCodeStub(isolate), save_doubles_(with_fp) {}
static void GenerateAheadOfTime(Isolate* isolate);
private:
......@@ -118,8 +118,8 @@ class StoreRegistersStateStub: public PlatformCodeStub {
class RestoreRegistersStateStub: public PlatformCodeStub {
public:
explicit RestoreRegistersStateStub(SaveFPRegsMode with_fp)
: save_doubles_(with_fp) {}
explicit RestoreRegistersStateStub(Isolate* isolate, SaveFPRegsMode with_fp)
: PlatformCodeStub(isolate), save_doubles_(with_fp) {}
static void GenerateAheadOfTime(Isolate* isolate);
private:
......@@ -132,7 +132,7 @@ class RestoreRegistersStateStub: public PlatformCodeStub {
class StringCompareStub: public PlatformCodeStub {
public:
StringCompareStub() { }
explicit StringCompareStub(Isolate* isolate) : PlatformCodeStub(isolate) { }
// Compare two flat ASCII strings and returns result in v0.
static void GenerateCompareFlatAsciiStrings(MacroAssembler* masm,
......@@ -173,11 +173,13 @@ class StringCompareStub: public PlatformCodeStub {
// so you don't have to set up the frame.
class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
public:
WriteInt32ToHeapNumberStub(Register the_int,
WriteInt32ToHeapNumberStub(Isolate* isolate,
Register the_int,
Register the_heap_number,
Register scratch,
Register scratch2)
: the_int_(the_int),
: PlatformCodeStub(isolate),
the_int_(the_int),
the_heap_number_(the_heap_number),
scratch_(scratch),
sign_(scratch2) {
......@@ -216,12 +218,14 @@ class WriteInt32ToHeapNumberStub : public PlatformCodeStub {
class RecordWriteStub: public PlatformCodeStub {
public:
RecordWriteStub(Register object,
RecordWriteStub(Isolate* isolate,
Register object,
Register value,
Register address,
RememberedSetAction remembered_set_action,
SaveFPRegsMode fp_mode)
: object_(object),
: PlatformCodeStub(isolate),
object_(object),
value_(value),
address_(address),
remembered_set_action_(remembered_set_action),
......@@ -406,7 +410,7 @@ class RecordWriteStub: public PlatformCodeStub {
// moved by GC
class DirectCEntryStub: public PlatformCodeStub {
public:
DirectCEntryStub() {}
explicit DirectCEntryStub(Isolate* isolate) : PlatformCodeStub(isolate) {}
void Generate(MacroAssembler* masm);
void GenerateCall(MacroAssembler* masm, Register target);
......@@ -422,7 +426,8 @@ class NameDictionaryLookupStub: public PlatformCodeStub {
public:
enum LookupMode { POSITIVE_LOOKUP, NEGATIVE_LOOKUP };
explicit NameDictionaryLookupStub(LookupMode mode) : mode_(mode) { }
NameDictionaryLookupStub(Isolate* isolate, LookupMode mode)
: PlatformCodeStub(isolate), mode_(mode) { }
void Generate(MacroAssembler* masm);
......
......@@ -156,7 +156,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
__ PrepareCEntryArgs(0); // No arguments.
__ PrepareCEntryFunction(ExternalReference::debug_break(masm->isolate()));
CEntryStub ceb(1);
CEntryStub ceb(masm->isolate(), 1);
__ CallStub(&ceb);
// Restore the register values from the expression stack.
......
......@@ -249,7 +249,7 @@ void FullCodeGenerator::Generate() {
__ Push(info->scope()->GetScopeInfo());
__ CallRuntime(Runtime::kHiddenNewGlobalContext, 2);
} else if (heap_slots <= FastNewContextStub::kMaximumSlots) {
FastNewContextStub stub(heap_slots);
FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ push(a1);
......@@ -310,7 +310,7 @@ void FullCodeGenerator::Generate() {
} else {
type = ArgumentsAccessStub::NEW_SLOPPY_FAST;
}
ArgumentsAccessStub stub(type);
ArgumentsAccessStub stub(isolate(), type);
__ CallStub(&stub);
SetVar(arguments, v0, a1, a2);
......@@ -1368,7 +1368,9 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
FastNewClosureStub stub(info->strict_mode(), info->is_generator());
FastNewClosureStub stub(isolate(),
info->strict_mode(),
info->is_generator());
__ li(a2, Operand(info));
__ CallStub(&stub);
} else {
......@@ -1693,7 +1695,7 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ Push(a3, a2, a1, a0);
__ CallRuntime(Runtime::kHiddenCreateObjectLiteral, 4);
} else {
FastCloneShallowObjectStub stub(properties_count);
FastCloneShallowObjectStub stub(isolate(), properties_count);
__ CallStub(&stub);
}
......@@ -1835,6 +1837,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
if (has_fast_elements && constant_elements_values->map() ==
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
isolate(),
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
allocation_site_mode,
length);
......@@ -1856,7 +1859,8 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
}
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
FastCloneShallowArrayStub stub(isolate(), mode, allocation_site_mode,
length);
__ CallStub(&stub);
}
......@@ -1890,7 +1894,7 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
} else {
__ li(a3, Operand(Smi::FromInt(i)));
__ mov(a0, result_register());
StoreArrayLiteralElementStub stub;
StoreArrayLiteralElementStub stub(isolate());
__ CallStub(&stub);
}
......@@ -2138,7 +2142,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
__ mov(a0, v0);
__ mov(a1, a0);
__ sw(a1, MemOperand(sp, 2 * kPointerSize));
CallFunctionStub stub(1, CALL_AS_METHOD);
CallFunctionStub stub(isolate(), 1, CALL_AS_METHOD);
__ CallStub(&stub);
__ lw(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
......@@ -2359,7 +2363,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
patch_site.EmitJumpIfSmi(scratch1, &smi_case);
__ bind(&stub_call);
BinaryOpICStub stub(op, mode);
BinaryOpICStub stub(isolate(), op, mode);
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
__ jmp(&done);
......@@ -2437,7 +2441,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
OverwriteMode mode) {
__ mov(a0, result_register());
__ pop(a1);
BinaryOpICStub stub(op, mode);
BinaryOpICStub stub(isolate(), op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
CallIC(stub.GetCode(isolate()), expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
......@@ -2686,7 +2690,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr) {
}
// Record source position for debugger.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, flags);
CallFunctionStub stub(isolate(), arg_count, flags);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
......@@ -2728,7 +2732,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
// Record source position for debugger.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, CALL_AS_METHOD);
CallFunctionStub stub(isolate(), arg_count, CALL_AS_METHOD);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
......@@ -2759,7 +2763,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
__ li(a3, Operand(Smi::FromInt(expr->CallFeedbackSlot())));
// Record call targets in unoptimized code.
CallFunctionStub stub(arg_count, RECORD_CALL_TARGET);
CallFunctionStub stub(isolate(), arg_count, RECORD_CALL_TARGET);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
......@@ -2835,7 +2839,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
}
// Record source position for debugger.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
RecordJSReturnSite(expr);
......@@ -2952,7 +2956,7 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ li(a2, FeedbackVector());
__ li(a3, Operand(Smi::FromInt(expr->CallNewFeedbackSlot())));
CallConstructStub stub(RECORD_CALL_TARGET);
CallConstructStub stub(isolate(), RECORD_CALL_TARGET);
__ Call(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL);
PrepareForBailoutForId(expr->ReturnId(), TOS_REG);
context()->Plug(v0);
......@@ -3328,7 +3332,7 @@ void FullCodeGenerator::EmitArguments(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0));
__ mov(a1, v0);
__ li(a0, Operand(Smi::FromInt(info_->scope()->num_parameters())));
ArgumentsAccessStub stub(ArgumentsAccessStub::READ_ELEMENT);
ArgumentsAccessStub stub(isolate(), ArgumentsAccessStub::READ_ELEMENT);
__ CallStub(&stub);
context()->Plug(v0);
}
......@@ -3439,7 +3443,7 @@ void FullCodeGenerator::EmitLog(CallRuntime* expr) {
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
SubStringStub stub;
SubStringStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
......@@ -3452,7 +3456,7 @@ void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
void FullCodeGenerator::EmitRegExpExec(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
RegExpExecStub stub;
RegExpExecStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 4);
VisitForStackValue(args->at(0));
......@@ -3611,7 +3615,7 @@ void FullCodeGenerator::EmitMathPow(CallRuntime* expr) {
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
MathPowStub stub(MathPowStub::ON_STACK);
MathPowStub stub(isolate(), MathPowStub::ON_STACK);
__ CallStub(&stub);
context()->Plug(v0);
}
......@@ -3654,7 +3658,7 @@ void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
VisitForAccumulatorValue(args->at(0));
__ mov(a0, result_register());
NumberToStringStub stub;
NumberToStringStub stub(isolate());
__ CallStub(&stub);
context()->Plug(v0);
}
......@@ -3783,7 +3787,7 @@ void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
__ pop(a1);
__ mov(a0, result_register()); // StringAddStub requires args in a0, a1.
StringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
StringAddStub stub(isolate(), STRING_ADD_CHECK_BOTH, NOT_TENURED);
__ CallStub(&stub);
context()->Plug(v0);
}
......@@ -3796,7 +3800,7 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
StringCompareStub stub;
StringCompareStub stub(isolate());
__ CallStub(&stub);
context()->Plug(v0);
}
......@@ -3835,7 +3839,7 @@ void FullCodeGenerator::EmitCallFunction(CallRuntime* expr) {
void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
RegExpConstructResultStub stub;
RegExpConstructResultStub stub(isolate());
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 3);
VisitForStackValue(args->at(0));
......@@ -4210,7 +4214,7 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
// Record source position of the IC call.
SetSourcePosition(expr->position());
CallFunctionStub stub(arg_count, NO_CALL_FUNCTION_FLAGS);
CallFunctionStub stub(isolate(), arg_count, NO_CALL_FUNCTION_FLAGS);
__ lw(a1, MemOperand(sp, (arg_count + 1) * kPointerSize));
__ CallStub(&stub);
......@@ -4432,7 +4436,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ jmp(&stub_call);
__ bind(&slow);
}
ToNumberStub convert_stub;
ToNumberStub convert_stub(isolate());
__ CallStub(&convert_stub);
// Save result for postfix expressions.
......@@ -4462,7 +4466,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Record position before stub call.
SetSourcePosition(expr->position());
BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
BinaryOpICStub stub(isolate(), Token::ADD, NO_OVERWRITE);
CallIC(stub.GetCode(isolate()), expr->CountBinOpFeedbackId());
patch_site.EmitPatchInfo();
__ bind(&done);
......@@ -4671,7 +4675,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
case Token::INSTANCEOF: {
VisitForStackValue(expr->right());
InstanceofStub stub(InstanceofStub::kNoFlags);
InstanceofStub stub(isolate(), InstanceofStub::kNoFlags);
__ CallStub(&stub);
PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
// The stub returns 0 for true.
......
......@@ -203,7 +203,7 @@ bool LCodeGen::GeneratePrologue() {
Comment(";;; Allocate local context");
// Argument to NewContext is the function, which is in a1.
if (heap_slots <= FastNewContextStub::kMaximumSlots) {
FastNewContextStub stub(heap_slots);
FastNewContextStub stub(isolate(), heap_slots);
__ CallStub(&stub);
} else {
__ push(a1);
......@@ -1046,17 +1046,17 @@ void LCodeGen::DoCallStub(LCallStub* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
switch (instr->hydrogen()->major_key()) {
case CodeStub::RegExpExec: {
RegExpExecStub stub;
RegExpExecStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::SubString: {
SubStringStub stub;
SubStringStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
case CodeStub::StringCompare: {
StringCompareStub stub;
StringCompareStub stub(isolate());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
break;
}
......@@ -2011,7 +2011,7 @@ void LCodeGen::DoArithmeticT(LArithmeticT* instr) {
ASSERT(ToRegister(instr->right()).is(a0));
ASSERT(ToRegister(instr->result()).is(v0));
BinaryOpICStub stub(instr->op(), NO_OVERWRITE);
BinaryOpICStub stub(isolate(), instr->op(), NO_OVERWRITE);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
// Other arch use a nop here, to signal that there is no inlined
// patchable code. Mips does not need the nop, since our marker
......@@ -2653,7 +2653,7 @@ void LCodeGen::DoInstanceOf(LInstanceOf* instr) {
Register result = ToRegister(instr->result());
ASSERT(result.is(v0));
InstanceofStub stub(InstanceofStub::kArgsInRegisters);
InstanceofStub stub(isolate(), InstanceofStub::kArgsInRegisters);
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
__ Branch(&true_label, eq, result, Operand(zero_reg));
......@@ -2754,7 +2754,7 @@ void LCodeGen::DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
flags | InstanceofStub::kCallSiteInlineCheck);
flags = static_cast<InstanceofStub::Flags>(
flags | InstanceofStub::kReturnTrueFalseObject);
InstanceofStub stub(flags);
InstanceofStub stub(isolate(), flags);
PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
LoadContextFromDeferred(instr->context());
......@@ -3848,7 +3848,7 @@ void LCodeGen::DoPower(LPower* instr) {
ASSERT(ToDoubleRegister(instr->result()).is(f0));
if (exponent_type.IsSmi()) {
MathPowStub stub(MathPowStub::TAGGED);
MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsTagged()) {
Label no_deopt;
......@@ -3857,14 +3857,14 @@ void LCodeGen::DoPower(LPower* instr) {
__ LoadRoot(at, Heap::kHeapNumberMapRootIndex);
DeoptimizeIf(ne, instr->environment(), t3, Operand(at));
__ bind(&no_deopt);
MathPowStub stub(MathPowStub::TAGGED);
MathPowStub stub(isolate(), MathPowStub::TAGGED);
__ CallStub(&stub);
} else if (exponent_type.IsInteger32()) {
MathPowStub stub(MathPowStub::INTEGER);
MathPowStub stub(isolate(), MathPowStub::INTEGER);
__ CallStub(&stub);
} else {
ASSERT(exponent_type.IsDouble());
MathPowStub stub(MathPowStub::DOUBLE);
MathPowStub stub(isolate(), MathPowStub::DOUBLE);
__ CallStub(&stub);
}
}
......@@ -3968,7 +3968,7 @@ void LCodeGen::DoCallFunction(LCallFunction* instr) {
ASSERT(ToRegister(instr->result()).is(v0));
int arity = instr->arity();
CallFunctionStub stub(arity, instr->hydrogen()->function_flags());
CallFunctionStub stub(isolate(), arity, instr->hydrogen()->function_flags());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
......@@ -3981,7 +3981,7 @@ void LCodeGen::DoCallNew(LCallNew* instr) {
__ li(a0, Operand(instr->arity()));
// No cell in a2 for construct type feedback in optimized code
__ LoadRoot(a2, Heap::kUndefinedValueRootIndex);
CallConstructStub stub(NO_CALL_FUNCTION_FLAGS);
CallConstructStub stub(isolate(), NO_CALL_FUNCTION_FLAGS);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
......@@ -4000,7 +4000,7 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
: DONT_OVERRIDE;
if (instr->arity() == 0) {
ArrayNoArgumentConstructorStub stub(kind, override_mode);
ArrayNoArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
} else if (instr->arity() == 1) {
Label done;
......@@ -4012,17 +4012,19 @@ void LCodeGen::DoCallNewArray(LCallNewArray* instr) {
__ Branch(&packed_case, eq, t1, Operand(zero_reg));
ElementsKind holey_kind = GetHoleyElementsKind(kind);
ArraySingleArgumentConstructorStub stub(holey_kind, override_mode);
ArraySingleArgumentConstructorStub stub(isolate(),
holey_kind,
override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ jmp(&done);
__ bind(&packed_case);
}
ArraySingleArgumentConstructorStub stub(kind, override_mode);
ArraySingleArgumentConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
__ bind(&done);
} else {
ArrayNArgumentsConstructorStub stub(kind, override_mode);
ArrayNArgumentsConstructorStub stub(isolate(), kind, override_mode);
CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr);
}
}
......@@ -4430,7 +4432,7 @@ void LCodeGen::DoTransitionElementsKind(LTransitionElementsKind* instr) {
this, Safepoint::kWithRegistersAndDoubles);
__ li(a1, Operand(to_map));
bool is_js_array = from_map->instance_type() == JS_ARRAY_TYPE;
TransitionElementsKindStub stub(from_kind, to_kind, is_js_array);
TransitionElementsKindStub stub(isolate(), from_kind, to_kind, is_js_array);
__ CallStub(&stub);
RecordSafepointWithRegistersAndDoubles(
instr->pointer_map(), 0, Safepoint::kLazyDeopt);
......@@ -4454,7 +4456,8 @@ void LCodeGen::DoStringAdd(LStringAdd* instr) {
ASSERT(ToRegister(instr->context()).is(cp));
ASSERT(ToRegister(instr->left()).is(a1));
ASSERT(ToRegister(instr->right()).is(a0));
StringAddStub stub(instr->hydrogen()->flags(),
StringAddStub stub(isolate(),
instr->hydrogen()->flags(),
instr->hydrogen()->pretenure_flag());
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
}
......@@ -5452,7 +5455,8 @@ void LCodeGen::DoFunctionLiteral(LFunctionLiteral* instr) {
// space for nested functions that don't need literals cloning.
bool pretenure = instr->hydrogen()->pretenure();
if (!pretenure && instr->hydrogen()->has_no_literals()) {
FastNewClosureStub stub(instr->hydrogen()->strict_mode(),
FastNewClosureStub stub(isolate(),
instr->hydrogen()->strict_mode(),
instr->hydrogen()->is_generator());
__ li(a2, Operand(instr->hydrogen()->shared_info()));
CallCode(stub.GetCode(isolate()), RelocInfo::CODE_TARGET, instr);
......
......@@ -423,13 +423,15 @@ class LCodeGen: public LCodeGenBase {
switch (codegen_->expected_safepoint_kind_) {
case Safepoint::kWithRegisters: {
StoreRegistersStateStub stub1(kDontSaveFPRegs);
StoreRegistersStateStub stub1(codegen_->masm_->isolate(),
kDontSaveFPRegs);
codegen_->masm_->push(ra);
codegen_->masm_->CallStub(&stub1);
break;
}
case Safepoint::kWithRegistersAndDoubles: {
StoreRegistersStateStub stub2(kSaveFPRegs);
StoreRegistersStateStub stub2(codegen_->masm_->isolate(),
kSaveFPRegs);
codegen_->masm_->push(ra);
codegen_->masm_->CallStub(&stub2);
break;
......@@ -444,13 +446,15 @@ class LCodeGen: public LCodeGenBase {
ASSERT((kind & Safepoint::kWithRegisters) != 0);
switch (kind) {
case Safepoint::kWithRegisters: {
RestoreRegistersStateStub stub1(kDontSaveFPRegs);
RestoreRegistersStateStub stub1(codegen_->masm_->isolate(),
kDontSaveFPRegs);
codegen_->masm_->push(ra);
codegen_->masm_->CallStub(&stub1);
break;
}
case Safepoint::kWithRegistersAndDoubles: {
RestoreRegistersStateStub stub2(kSaveFPRegs);
RestoreRegistersStateStub stub2(codegen_->masm_->isolate(),
kSaveFPRegs);
codegen_->masm_->push(ra);
codegen_->masm_->CallStub(&stub2);
break;
......
......@@ -302,7 +302,8 @@ void MacroAssembler::RecordWrite(Register object,
if (ra_status == kRAHasNotBeenSaved) {
push(ra);
}
RecordWriteStub stub(object, value, address, remembered_set_action, fp_mode);
RecordWriteStub stub(isolate(), object, value, address, remembered_set_action,
fp_mode);
CallStub(&stub);
if (ra_status == kRAHasNotBeenSaved) {
pop(ra);
......@@ -352,7 +353,7 @@ void MacroAssembler::RememberedSetHelper(Register object, // For debug tests.
}
push(ra);
StoreBufferOverflowStub store_buffer_overflow =
StoreBufferOverflowStub(fp_mode);
StoreBufferOverflowStub(isolate(), fp_mode);
CallStub(&store_buffer_overflow);
pop(ra);
bind(&done);
......@@ -1456,7 +1457,7 @@ void MacroAssembler::TruncateDoubleToI(Register result,
Subu(sp, sp, Operand(kDoubleSize)); // Put input on stack.
sdc1(double_input, MemOperand(sp, 0));
DoubleToIStub stub(sp, result, 0, true, true);
DoubleToIStub stub(isolate(), sp, result, 0, true, true);
CallStub(&stub);
Addu(sp, sp, Operand(kDoubleSize));
......@@ -1477,7 +1478,8 @@ void MacroAssembler::TruncateHeapNumberToI(Register result, Register object) {
// If we fell through then inline version didn't succeed - call stub instead.
push(ra);
DoubleToIStub stub(object,
DoubleToIStub stub(isolate(),
object,
result,
HeapNumber::kValueOffset - kHeapObjectTag,
true,
......@@ -2691,7 +2693,7 @@ void MacroAssembler::Push(Handle<Object> handle) {
void MacroAssembler::DebugBreak() {
PrepareCEntryArgs(0);
PrepareCEntryFunction(ExternalReference(Runtime::kDebugBreak, isolate()));
CEntryStub ces(1);
CEntryStub ces(isolate(), 1);
ASSERT(AllowThisStubCall(&ces));
Call(ces.GetCode(isolate()), RelocInfo::DEBUG_BREAK);
}
......@@ -3966,7 +3968,7 @@ void MacroAssembler::CallApiFunctionAndReturn(
// Native call returns to the DirectCEntry stub which redirects to the
// return address pushed on stack (could have moved after GC).
// DirectCEntry stub itself is generated early and never moves.
DirectCEntryStub stub;
DirectCEntryStub stub(isolate());
stub.GenerateCall(this, t9);
if (FLAG_log_timer_events) {
......@@ -4216,7 +4218,7 @@ void MacroAssembler::CallRuntime(const Runtime::Function* f,
// smarter.
PrepareCEntryArgs(num_arguments);
PrepareCEntryFunction(ExternalReference(f, isolate()));
CEntryStub stub(1, save_doubles);
CEntryStub stub(isolate(), 1, save_doubles);
CallStub(&stub);
}
......@@ -4227,7 +4229,7 @@ void MacroAssembler::CallExternalReference(const ExternalReference& ext,
PrepareCEntryArgs(num_arguments);
PrepareCEntryFunction(ext);
CEntryStub stub(1);
CEntryStub stub(isolate(), 1);
CallStub(&stub, TypeFeedbackId::None(), al, zero_reg, Operand(zero_reg), bd);
}
......@@ -4256,7 +4258,7 @@ void MacroAssembler::TailCallRuntime(Runtime::FunctionId fid,
void MacroAssembler::JumpToExternalReference(const ExternalReference& builtin,
BranchDelaySlot bd) {
PrepareCEntryFunction(builtin);
CEntryStub stub(1);
CEntryStub stub(isolate(), 1);
Jump(stub.GetCode(isolate()),
RelocInfo::CODE_TARGET,
al,
......
......@@ -1096,7 +1096,7 @@ void RegExpMacroAssemblerMIPS::CallCheckStackGuardState(Register scratch) {
ExternalReference stack_guard_check =
ExternalReference::re_check_stack_guard_state(masm_->isolate());
__ li(t9, Operand(stack_guard_check));
DirectCEntryStub stub;
DirectCEntryStub stub(isolate());
stub.GenerateCall(masm_, t9);
// DirectCEntryStub allocated space for the C argument slots so we have to
......
......@@ -825,7 +825,7 @@ void StubCompiler::GenerateFastApiCall(MacroAssembler* masm,
__ li(api_function_address, Operand(ref));
// Jump to stub.
CallApiFunctionStub stub(is_store, call_data_undefined, argc);
CallApiFunctionStub stub(isolate, is_store, call_data_undefined, argc);
__ TailCallStub(&stub);
}
......@@ -1024,12 +1024,14 @@ void LoadStubCompiler::GenerateLoadField(Register reg,
Representation representation) {
if (!reg.is(receiver())) __ mov(receiver(), reg);
if (kind() == Code::LOAD_IC) {
LoadFieldStub stub(field.is_inobject(holder),
LoadFieldStub stub(isolate(),
field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
} else {
KeyedLoadFieldStub stub(field.is_inobject(holder),
KeyedLoadFieldStub stub(isolate(),
field.is_inobject(holder),
field.translate(holder),
representation);
GenerateTailCall(masm(), stub.GetCode(isolate()));
......@@ -1089,7 +1091,7 @@ void LoadStubCompiler::GenerateLoadCallback(
ExternalReference ref = ExternalReference(&fun, type, isolate());
__ li(getter_address_reg, Operand(ref));
CallApiGetterStub stub;
CallApiGetterStub stub(isolate());
__ TailCallStub(&stub);
}
......
......@@ -54,7 +54,8 @@ ConvertDToIFunc MakeConvertDToIFuncTrampoline(Isolate* isolate,
CHECK(buffer);
HandleScope handles(isolate);
MacroAssembler masm(isolate, buffer, static_cast<int>(actual_size));
DoubleToIStub stub(source_reg, destination_reg, 0, true, inline_fastpath);
DoubleToIStub stub(isolate, source_reg, destination_reg, 0, true,
inline_fastpath);
byte* start = stub.GetCode(isolate)->instruction_start();
Label done;
......