Commit e5f1b968 authored by Junliang Yan, committed by Commit Bot

PPC/s390: Reland "[builtins] Introduce further constant & external reference indirections"

Port 3f99a376

Original Commit Message:

    This is a reland of f5d30851

    Original change's description:
    > [builtins] Introduce further constant & external reference indirections
    >
    > This introduces further indirections for embedded constants and
    > external references for builtins generated by the macro-assembler.
    > The used mechanisms (LookupConstant and LookupExternalReference) are
    > identical to what we already use in CSA.
    >
    > Almost all builtins are now isolate-independent in both release and
    > debug modes. snapshot_blob.bin is roughly 670K smaller in embedded
    > builds vs. non-embedded builds, while libv8.so is roughly 280K larger.
    >
    > Bug: v8:6666
    > Change-Id: I7a6c2193ef5a763e6cf7543dd51597d6fff6c110
    > Reviewed-on: https://chromium-review.googlesource.com/1006581
    > Commit-Queue: Jakob Gruber <jgruber@chromium.org>
    > Reviewed-by: Michael Starzinger <mstarzinger@chromium.org>
    > Cr-Commit-Position: refs/heads/master@{#52810}

R=jgruber@chromium.org, joransiu@ca.ibm.com, michael_dawson@ca.ibm.com
BUG=
LOG=N

Change-Id: I037faebce37a866091dc35e04500790591292622
Reviewed-on: https://chromium-review.googlesource.com/1031397
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Junliang Yan <jyan@ca.ibm.com>
Cr-Commit-Position: refs/heads/master@{#52855}
parent 6379e2a4
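
The recurring pattern in the hunks below is that call sites which previously embedded an external reference directly, e.g. __ mov(reg, Operand(ExternalReference::Create(address))), now go through a new Move(Register, ExternalReference) overload on TurboAssembler. That overload can decide at code-generation time whether to embed the raw address or look it up indirectly via the root register, which is what makes the generated builtins isolate-independent. A minimal sketch of what the overload amounts to on these ports (the guard condition and helper names are approximations, not the verbatim V8 code):

    void TurboAssembler::Move(Register dst, ExternalReference reference) {
    #ifdef V8_EMBEDDED_BUILTINS
      // When generating isolate-independent (embedded) builtin code and the
      // root register holds a valid roots pointer, load the address from the
      // isolate's external reference table instead of embedding it.
      if (root_array_available() &&
          isolate()->ShouldLoadConstantsFromRootList()) {
        LookupExternalReference(dst, reference);
        return;
      }
    #endif  // V8_EMBEDDED_BUILTINS
      // Otherwise fall back to embedding the address as an immediate.
      mov(dst, Operand(reference));
    }
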
@@ -19,7 +19,7 @@ namespace internal {
void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
ExitFrameType exit_frame_type) {
__ mov(r15, Operand(ExternalReference::Create(address)));
__ Move(r15, ExternalReference::Create(address));
if (exit_frame_type == BUILTIN_EXIT) {
__ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
RelocInfo::CODE_TARGET);
@@ -412,10 +412,7 @@ void Generate_JSConstructStubGeneric(MacroAssembler* masm,
}
__ bind(&do_throw);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
}
// Throw away the result of the constructor invocation and use the
// on-stack receiver as the result.
@@ -630,7 +627,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// r6: argc
// r7: argv
// r0,r8-r9, cp may be clobbered
{
NoRootArrayScope no_root_array(masm);
ProfileEntryHookStub::MaybeCallEntryHook(masm);
__ InitializeRootRegister();
}
// Enter an internal frame.
{
@@ -639,7 +640,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Setup the context (we need to use the caller context from the isolate).
ExternalReference context_address = ExternalReference::Create(
IsolateAddressId::kContextAddress, masm->isolate());
__ mov(cp, Operand(context_address));
__ Move(cp, context_address);
__ LoadP(cp, MemOperand(cp));
// Push the function and the receiver onto the stack.
@@ -851,8 +852,8 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
Register scratch2 = bytecode;
DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
bytecode));
__ mov(bytecode_size_table,
Operand(ExternalReference::bytecode_size_table_address()));
__ Move(bytecode_size_table,
ExternalReference::bytecode_size_table_address());
// Check if the bytecode is a Wide or ExtraWide prefix bytecode.
Label process_bytecode, extra_wide;
@@ -1027,9 +1028,9 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// handler at the current bytecode offset.
Label do_dispatch;
__ bind(&do_dispatch);
__ mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
__ Move(
kInterpreterDispatchTableRegister,
ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
__ lbzx(r6, MemOperand(kInterpreterBytecodeArrayRegister,
kInterpreterBytecodeOffsetRegister));
__ ShiftLeftImm(r6, r6, Operand(kPointerSizeLog2));
@@ -1257,9 +1258,9 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
__ mtlr(r0);
// Initialize the dispatch table register.
__ mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
__ Move(
kInterpreterDispatchTableRegister,
ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
// Get the bytecode array pointer from the frame.
__ LoadP(kInterpreterBytecodeArrayRegister,
......
@@ -19,7 +19,7 @@ namespace internal {
void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
ExitFrameType exit_frame_type) {
__ mov(r7, Operand(ExternalReference::Create(address)));
__ Move(r7, ExternalReference::Create(address));
if (exit_frame_type == BUILTIN_EXIT) {
__ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
RelocInfo::CODE_TARGET);
@@ -411,10 +411,7 @@ void Generate_JSConstructStubGeneric(MacroAssembler* masm,
__ b(&use_receiver);
}
__ bind(&do_throw);
{
FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
__ CallRuntime(Runtime::kThrowConstructorReturnedNonObject);
}
// Throw away the result of the constructor invocation and use the
// on-stack receiver as the result.
@@ -632,7 +629,11 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// r5: argc
// r6: argv
// r0,r7-r9, cp may be clobbered
{
NoRootArrayScope no_root_array(masm);
ProfileEntryHookStub::MaybeCallEntryHook(masm);
__ InitializeRootRegister();
}
// Enter an internal frame.
{
@@ -642,7 +643,7 @@ static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
// Setup the context (we need to use the caller context from the isolate).
ExternalReference context_address = ExternalReference::Create(
IsolateAddressId::kContextAddress, masm->isolate());
__ mov(cp, Operand(context_address));
__ Move(cp, context_address);
__ LoadP(cp, MemOperand(cp));
// Push the function and the receiver onto the stack.
@@ -861,8 +862,8 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
Register scratch2 = bytecode;
DCHECK(!AreAliased(bytecode_array, bytecode_offset, bytecode_size_table,
bytecode));
__ mov(bytecode_size_table,
Operand(ExternalReference::bytecode_size_table_address()));
__ Move(bytecode_size_table,
ExternalReference::bytecode_size_table_address());
// Check if the bytecode is a Wide or ExtraWide prefix bytecode.
Label process_bytecode, extra_wide;
@@ -1263,9 +1264,9 @@ static void Generate_InterpreterEnterBytecode(MacroAssembler* masm) {
Code::kHeaderSize - kHeapObjectTag));
// Initialize the dispatch table register.
__ mov(kInterpreterDispatchTableRegister,
Operand(ExternalReference::interpreter_dispatch_table_address(
masm->isolate())));
__ Move(
kInterpreterDispatchTableRegister,
ExternalReference::interpreter_dispatch_table_address(masm->isolate()));
// Get the bytecode array pointer from the frame.
__ LoadP(kInterpreterBytecodeArrayRegister,
......
@@ -258,6 +258,8 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
// Called from C
__ function_descriptor();
{
NoRootArrayScope no_root_array(masm);
ProfileEntryHookStub::MaybeCallEntryHook(masm);
// PPC LINUX ABI:
@@ -274,6 +276,7 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ LoadDoubleLiteral(kDoubleRegZero, Double(0.0), r0);
__ InitializeRootRegister();
}
// Push a frame with special values setup to mark it as an entry frame.
// r3: code entry
......
This diff is collapsed.
@@ -220,6 +220,9 @@ class TurboAssembler : public Assembler {
void LoadPC(Register dst);
void ComputeCodeStartAddress(Register dst);
bool root_array_available() const { return root_array_available_; }
void set_root_array_available(bool v) { root_array_available_ = v; }
void StoreDouble(DoubleRegister src, const MemOperand& mem,
Register scratch = no_reg);
void StoreDoubleU(DoubleRegister src, const MemOperand& mem,
@@ -432,6 +435,13 @@ class TurboAssembler : public Assembler {
void ShiftRightAlgPair(Register dst_low, Register dst_high, Register src_low,
Register src_high, uint32_t shift);
#endif
#ifdef V8_EMBEDDED_BUILTINS
void LookupConstant(Register destination, Handle<Object> object);
void LookupExternalReference(Register destination,
ExternalReference reference);
#endif // V8_EMBEDDED_BUILTINS
// Returns the size of a call in instructions. Note, the value returned is
// only valid as long as no entries are added to the constant pool between
// checking the call size and emitting the actual call.
@@ -504,6 +514,7 @@ class TurboAssembler : public Assembler {
// Register move. May do nothing if the registers are identical.
void Move(Register dst, Smi* smi) { LoadSmiLiteral(dst, smi); }
void Move(Register dst, Handle<HeapObject> value);
void Move(Register dst, ExternalReference reference);
void Move(Register dst, Register src, Condition cond = al);
void Move(DoubleRegister dst, DoubleRegister src);
@@ -669,6 +680,7 @@ class TurboAssembler : public Assembler {
static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
bool has_frame_ = false;
bool root_array_available_ = true;
Isolate* const isolate_;
// This handle will be patched with the code object on installation.
Handle<HeapObject> code_object_;
......
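
The JSEntryStub changes above and below wrap the stub prologue in a NoRootArrayScope until __ InitializeRootRegister() has run, and the TurboAssembler headers gain a set_root_array_available() setter for exactly that purpose: while the scope is active, the constant and external-reference indirections must not be emitted, because kRootRegister does not yet point at the roots array. Roughly, such a scope is just an RAII toggle of that flag (a sketch, not necessarily the exact V8 definition):

    class NoRootArrayScope {
     public:
      explicit NoRootArrayScope(MacroAssembler* masm)
          : masm_(masm), old_value_(masm->root_array_available()) {
        // Forbid root-register-relative loads until the register is set up.
        masm->set_root_array_available(false);
      }
      ~NoRootArrayScope() { masm_->set_root_array_available(old_value_); }

     private:
      MacroAssembler* masm_;
      bool old_value_;
    };
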
@@ -241,6 +241,8 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
Label invoke, handler_entry, exit;
{
NoRootArrayScope no_root_array(masm);
ProfileEntryHookStub::MaybeCallEntryHook(masm);
// saving floating point registers
@@ -301,10 +303,11 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
// Set up frame pointer for the frame to be pushed.
// Need to add kPointerSize, because sp has one extra
// frame already for the frame type being pushed later.
__ lay(fp,
MemOperand(sp, -EntryFrameConstants::kCallerFPOffset + kPointerSize));
__ lay(fp, MemOperand(
sp, -EntryFrameConstants::kCallerFPOffset + kPointerSize));
__ InitializeRootRegister();
}
// If this is the outermost JS call, set js_entry_sp value.
Label non_outermost_js;
......
This diff is collapsed.
@@ -179,12 +179,18 @@ class TurboAssembler : public Assembler {
return code_object_;
}
#ifdef V8_EMBEDDED_BUILTINS
void LookupConstant(Register destination, Handle<Object> object);
void LookupExternalReference(Register destination,
ExternalReference reference);
#endif // V8_EMBEDDED_BUILTINS
// Returns the size of a call in instructions.
static int CallSize(Register target);
int CallSize(Address target, RelocInfo::Mode rmode, Condition cond = al);
// Jump, Call, and Ret pseudo instructions implementing inter-working.
void Jump(Register target);
void Jump(Register target, Condition cond = al);
void Jump(Address target, RelocInfo::Mode rmode, Condition cond = al,
CRegister cr = cr7);
void Jump(Handle<Code> code, RelocInfo::Mode rmode, Condition cond = al);
@@ -222,6 +228,7 @@ class TurboAssembler : public Assembler {
// Register move. May do nothing if the registers are identical.
void Move(Register dst, Smi* smi) { LoadSmiLiteral(dst, smi); }
void Move(Register dst, Handle<HeapObject> value);
void Move(Register dst, ExternalReference reference);
void Move(Register dst, Register src, Condition cond = al);
void Move(DoubleRegister dst, DoubleRegister src);
@@ -1014,6 +1021,9 @@ class TurboAssembler : public Assembler {
void ResetSpeculationPoisonRegister();
void ComputeCodeStartAddress(Register dst);
bool root_array_available() const { return root_array_available_; }
void set_root_array_available(bool v) { root_array_available_ = v; }
private:
static const int kSmiShift = kSmiTagSize + kSmiShiftSize;
@@ -1026,6 +1036,7 @@ class TurboAssembler : public Assembler {
int num_double_arguments);
bool has_frame_ = false;
bool root_array_available_ = true;
Isolate* isolate_;
// This handle will be patched with the code object on installation.
Handle<HeapObject> code_object_;
......
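
For reference, the LookupExternalReference helper declared in both TurboAssembler headers above is the indirection itself: it turns an ExternalReference into an index into the isolate's external reference table and loads the entry relative to kRootRegister, so no isolate-specific address is baked into the instruction stream. A simplified sketch, assuming the table sits at a fixed offset from the roots array (identifiers are approximate rather than the exact V8 code):

    void TurboAssembler::LookupExternalReference(Register destination,
                                                 ExternalReference reference) {
      CHECK(root_array_available());
      // Encode the raw address as an index into the isolate's external
      // reference table.
      ExternalReferenceEncoder encoder(isolate());
      uint32_t index = encoder.Encode(reference.address()).index();
      // The table is assumed reachable at a fixed offset from the roots
      // array, so the entry can be loaded relative to kRootRegister.
      int32_t offset = Heap::roots_to_external_reference_table_offset() +
                       ExternalReferenceTable::OffsetOfEntry(index);
      LoadP(destination, MemOperand(kRootRegister, offset), r0);
    }
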