Commit b766299d authored by Jakob Gruber, committed by Commit Bot

[arm] Add missing RELATIVE_CODE_TARGET iteration

Code object iteration was missing logic for RELATIVE_CODE_TARGET
reloc entries. Garbage collection could thus miss objects that were
referenced only as targets of pc-relative calls or jumps.

RELATIVE_CODE_TARGETs are only used on arm, mips, and s390 and only
at mksnapshot-time.

This exposed another issue in that the interpreter entry trampoline
copy we generate for profiling *did* contain relative calls in
runtime-accessible code. This is a problem, since code space on arm is,
by default, too large to be fully addressable through pc-relative
calls. This CL thus also disables the related
FLAG_interpreted_frames_native_stack feature on arm.
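
As a back-of-the-envelope check (not part of this CL), the limit follows from
the B/BL encoding: the branch offset is a signed 24-bit word count, so a
pc-relative call can only reach about +/-32MB, which the default arm code
space exceeds. A minimal C++ sketch of the arithmetic:

  // Illustrative only; the constants mirror the ARM A32 B/BL encoding, not V8 code.
  #include <cstdint>
  #include <iostream>

  int main() {
    constexpr int64_t kMaxWordOffset = int64_t{1} << 23;  // magnitude of signed imm24
    constexpr int64_t kInstrSize = 4;                      // offsets are counted in words
    constexpr int64_t kReachBytes = kMaxWordOffset * kInstrSize;
    std::cout << "B/BL reach: +/- " << (kReachBytes >> 20) << " MB\n";  // prints 32
    return 0;
  }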

Drive-by: Ensure the builtins constants table does not contain Code
objects.

Bug: v8:8713,v8:6666
Change-Id: Idd914b46970ad08f9091fc72113fa7aed2732e71
Reviewed-on: https://chromium-review.googlesource.com/c/1424866
Reviewed-by: Sigurd Schneider <sigurds@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Commit-Queue: Jakob Gruber <jgruber@chromium.org>
Cr-Commit-Position: refs/heads/master@{#59023}
parent ea513ab8
@@ -79,9 +79,12 @@ Address RelocInfo::target_address_address() {
   DCHECK(HasTargetAddressAddress());
   if (Assembler::IsMovW(Memory<int32_t>(pc_))) {
     return pc_;
-  } else {
-    DCHECK(Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc_)));
+  } else if (Assembler::IsLdrPcImmediateOffset(Memory<int32_t>(pc_))) {
     return constant_pool_entry_address();
+  } else {
+    DCHECK(Assembler::IsBOrBlPcImmediateOffset(Memory<int32_t>(pc_)));
+    DCHECK(IsRelativeCodeTarget(rmode_));
+    return pc_;
   }
 }
@@ -295,6 +298,7 @@ Address Assembler::return_address_from_call_start(Address pc) {
 void Assembler::deserialization_set_special_target_at(
     Address constant_pool_entry, Code code, Address target) {
+  DCHECK(!Builtins::IsIsolateIndependentBuiltin(code));
   Memory<Address>(constant_pool_entry) = target;
 }
@@ -488,6 +488,11 @@ const Instr kPopRegPattern = al | B26 | L | 4 | PostIndex | sp.code() * B16;
 // ldr rd, [pc, #offset]
 const Instr kLdrPCImmedMask = 15 * B24 | 7 * B20 | 15 * B16;
 const Instr kLdrPCImmedPattern = 5 * B24 | L | pc.code() * B16;
+// Pc-relative call or jump to a signed imm24 offset.
+// bl pc + #offset
+// b pc + #offset
+const Instr kBOrBlPCImmedMask = 0xE * B24;
+const Instr kBOrBlPCImmedPattern = 0xA * B24;
 // vldr dd, [pc, #offset]
 const Instr kVldrDPCMask = 15 * B24 | 3 * B20 | 15 * B16 | 15 * B8;
 const Instr kVldrDPCPattern = 13 * B24 | L | pc.code() * B16 | 11 * B8;
@@ -730,6 +735,9 @@ bool Assembler::IsLdrPcImmediateOffset(Instr instr) {
   return (instr & kLdrPCImmedMask) == kLdrPCImmedPattern;
 }
 
+bool Assembler::IsBOrBlPcImmediateOffset(Instr instr) {
+  return (instr & kBOrBlPCImmedMask) == kBOrBlPCImmedPattern;
+}
+
 bool Assembler::IsVldrDPcImmediateOffset(Instr instr) {
   // Check the instruction is indeed a
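
Aside (illustrative, not from the CL): the new mask/pattern pair works because
B and BL share the bit pattern 0b101 in bits 27:25 of an A32 instruction; the
link bit (bit 24) is deliberately excluded from kBOrBlPCImmedMask, so a single
pattern matches both forms. A standalone sketch of the check:

  // Hypothetical self-test; the instruction words are hand-encoded examples.
  #include <cassert>
  #include <cstdint>

  int main() {
    constexpr uint32_t kMask = 0xE << 24;      // selects bits 27:25
    constexpr uint32_t kPattern = 0xA << 24;   // 0b101 in bits 27:25
    constexpr uint32_t b_insn = 0xEA000000;    // b  pc+8  (cond=al, L=0, imm24=0)
    constexpr uint32_t bl_insn = 0xEB000000;   // bl pc+8  (cond=al, L=1, imm24=0)
    constexpr uint32_t ldr_insn = 0xE59F0000;  // ldr r0, [pc, #0] -- not a branch
    assert((b_insn & kMask) == kPattern);
    assert((bl_insn & kMask) == kPattern);
    assert((ldr_insn & kMask) != kPattern);
    return 0;
  }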
@@ -1157,6 +1157,7 @@ class V8_EXPORT_PRIVATE Assembler : public AssemblerBase {
   static bool IsStrRegFpNegOffset(Instr instr);
   static bool IsLdrRegFpNegOffset(Instr instr);
   static bool IsLdrPcImmediateOffset(Instr instr);
+  static bool IsBOrBlPcImmediateOffset(Instr instr);
   static bool IsVldrDPcImmediateOffset(Instr instr);
   static bool IsBlxReg(Instr instr);
   static bool IsBlxIp(Instr instr);
@@ -140,8 +140,8 @@ TF_BUILTIN(FastNewClosure, ConstructorBuiltinsAssembler) {
   StoreObjectFieldNoWriteBarrier(result, JSFunction::kSharedFunctionInfoOffset,
                                  shared_function_info);
   StoreObjectFieldNoWriteBarrier(result, JSFunction::kContextOffset, context);
-  Handle<Code> lazy_builtin_handle(
-      isolate()->builtins()->builtin(Builtins::kCompileLazy), isolate());
+  Handle<Code> lazy_builtin_handle =
+      isolate()->builtins()->builtin_handle(Builtins::kCompileLazy);
   Node* lazy_builtin = HeapConstant(lazy_builtin_handle);
   StoreObjectFieldNoWriteBarrier(result, JSFunction::kCodeOffset, lazy_builtin);
   Return(result);
@@ -40,6 +40,10 @@ uint32_t BuiltinsConstantsTableBuilder::AddObject(Handle<Object> object) {
   // Must be generating embedded builtin code.
   DCHECK(isolate_->ShouldLoadConstantsFromRootList());
 
+  // All code objects should be loaded through the root register or use
+  // pc-relative addressing.
+  DCHECK(!object->IsCode());
+
 #endif
 
   uint32_t* maybe_key = map_.Find(object);
@@ -1311,9 +1311,6 @@ DEFINE_BOOL(prof_browser_mode, true,
 DEFINE_STRING(logfile, "v8.log", "Specify the name of the log file.")
 DEFINE_BOOL(logfile_per_isolate, true, "Separate log files for each isolate.")
 DEFINE_BOOL(ll_prof, false, "Enable low-level linux profiler.")
-DEFINE_BOOL(interpreted_frames_native_stack, false,
-            "Show interpreted frames on the native stack (useful for external "
-            "profilers).")
 DEFINE_BOOL(perf_basic_prof, false,
             "Enable perf linux profiler (basic support).")
 DEFINE_NEG_IMPLICATION(perf_basic_prof, compact_code_space)
@@ -1351,6 +1348,18 @@ DEFINE_STRING(redirect_code_traces_to, nullptr,
 DEFINE_BOOL(print_opt_source, false,
             "print source code of optimized and inlined functions")
 
+#ifdef V8_TARGET_ARCH_ARM
+// Unsupported on arm. See https://crbug.com/v8/8713.
+DEFINE_BOOL_READONLY(
+    interpreted_frames_native_stack, false,
+    "Show interpreted frames on the native stack (useful for external "
+    "profilers).")
+#else
+DEFINE_BOOL(interpreted_frames_native_stack, false,
+            "Show interpreted frames on the native stack (useful for external "
+            "profilers).")
+#endif
+
 //
 // Disassembler only flags
 //
@@ -3283,17 +3283,22 @@ bool Isolate::Init(StartupDeserializer* des) {
     builtins_constants_table_builder_ = new BuiltinsConstantsTableBuilder(this);
   }
   setup_delegate_->SetupBuiltins(this);
+#ifndef V8_TARGET_ARCH_ARM
   if (create_heap_objects) {
     // Create a copy of the the interpreter entry trampoline and store it
     // on the root list. It is used as a template for further copies that
     // may later be created to help profile interpreted code.
-    // TODO(jgruber): Merge this with the block below once
-    // FLAG_embedded_builtins is always true.
+    // We currently cannot do this on arm due to RELATIVE_CODE_TARGETs
+    // assuming that all possible Code targets may be addressed with an int24
+    // offset, effectively limiting code space size to 32MB. We can guarantee
+    // this at mksnapshot-time, but not at runtime.
+    // See also: https://crbug.com/v8/8713.
     HandleScope handle_scope(this);
     Handle<Code> code =
        factory()->CopyCode(BUILTIN_CODE(this, InterpreterEntryTrampoline));
     heap_.SetInterpreterEntryTrampolineForProfiling(*code);
   }
+#endif
   if (FLAG_embedded_builtins && create_heap_objects) {
     builtins_constants_table_builder_->Finalize();
     delete builtins_constants_table_builder_;
@@ -593,18 +593,20 @@ class Code::BodyDescriptor final : public BodyDescriptorBase {
   template <typename ObjectVisitor>
   static inline void IterateBody(Map map, HeapObject obj, ObjectVisitor* v) {
-    int mode_mask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
-                    RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
-                    RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
-                    RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
-                    RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
-                    RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) |
-                    RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
+    static constexpr int kModeMask =
+        RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
+        RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET) |
+        RelocInfo::ModeMask(RelocInfo::EMBEDDED_OBJECT) |
+        RelocInfo::ModeMask(RelocInfo::EXTERNAL_REFERENCE) |
+        RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE) |
+        RelocInfo::ModeMask(RelocInfo::INTERNAL_REFERENCE_ENCODED) |
+        RelocInfo::ModeMask(RelocInfo::OFF_HEAP_TARGET) |
+        RelocInfo::ModeMask(RelocInfo::RUNTIME_ENTRY);
 
     // GC does not visit data/code in the header and in the body directly.
     IteratePointers(obj, kRelocationInfoOffset, kDataStart, v);
 
-    RelocIterator it(Code::cast(obj), mode_mask);
+    RelocIterator it(Code::cast(obj), kModeMask);
     v->VisitRelocInfo(&it);
   }
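
For orientation (a sketch, not code from this CL): with RELATIVE_CODE_TARGET in
the mask, a reloc-info walk over a Code object now also reports the targets of
pc-relative branches, so the GC keeps them alive. Assuming V8's internal Code,
RelocInfo, RelocIterator, and ObjectVisitor declarations are in scope, the
visiting loop looks roughly like this:

  // Hypothetical helper mirroring the body-descriptor behaviour above.
  void VisitAllCodeTargets(Code code, ObjectVisitor* v) {
    constexpr int kMask = RelocInfo::ModeMask(RelocInfo::CODE_TARGET) |
                          RelocInfo::ModeMask(RelocInfo::RELATIVE_CODE_TARGET);
    for (RelocIterator it(code, kMask); !it.done(); it.next()) {
      // Previously, targets reached only via RELATIVE_CODE_TARGET entries
      // were never visited here and could be collected prematurely.
      v->VisitCodeTarget(code, it.rinfo());
    }
  }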
@@ -382,9 +382,9 @@ bool RelocInfo::HasTargetAddressAddress() const {
       ModeMask(RUNTIME_ENTRY) | ModeMask(WASM_CALL) | ModeMask(WASM_STUB_CALL);
 #else
   static constexpr int kTargetAddressAddressModeMask =
-      ModeMask(CODE_TARGET) | ModeMask(EMBEDDED_OBJECT) |
-      ModeMask(EXTERNAL_REFERENCE) | ModeMask(OFF_HEAP_TARGET) |
-      ModeMask(RUNTIME_ENTRY) | ModeMask(WASM_CALL);
+      ModeMask(CODE_TARGET) | ModeMask(RELATIVE_CODE_TARGET) |
+      ModeMask(EMBEDDED_OBJECT) | ModeMask(EXTERNAL_REFERENCE) |
+      ModeMask(OFF_HEAP_TARGET) | ModeMask(RUNTIME_ENTRY) | ModeMask(WASM_CALL);
 #endif
   return (ModeMask(rmode_) & kTargetAddressAddressModeMask) != 0;
 }
@@ -855,6 +855,9 @@ void Serializer::ObjectSerializer::VisitRelocInfo(RelocIterator* it) {
 void Serializer::ObjectSerializer::VisitCodeTarget(Code host,
                                                    RelocInfo* rinfo) {
+#ifdef V8_TARGET_ARCH_ARM
+  DCHECK(!RelocInfo::IsRelativeCodeTarget(rinfo->rmode()));
+#endif
   int skip = SkipTo(rinfo->target_address_address());
   Code object = Code::GetCodeFromTargetAddress(rinfo->target_address());
   serializer_->SerializeObject(object, kFromCode, kInnerPointer, skip);
@@ -5008,6 +5008,7 @@ TEST(InterpreterGenerators) {
   }
 }
 
+#ifndef V8_TARGET_ARCH_ARM
 TEST(InterpreterWithNativeStack) {
   i::FLAG_interpreted_frames_native_stack = true;
@@ -5029,6 +5030,7 @@ TEST(InterpreterWithNativeStack) {
   CHECK(code->is_interpreter_trampoline_builtin());
   CHECK_NE(code->address(), interpreter_entry_trampoline->address());
 }
+#endif  // V8_TARGET_ARCH_ARM
 
 TEST(InterpreterGetBytecodeHandler) {
   HandleAndZoneScope handles;
@@ -636,6 +636,7 @@ TEST(LogAll) {
   isolate->Dispose();
 }
 
+#ifndef V8_TARGET_ARCH_ARM
 TEST(LogInterpretedFramesNativeStack) {
   SETUP_FLAGS();
   i::FLAG_interpreted_frames_native_stack = true;
@@ -658,6 +659,7 @@ TEST(LogInterpretedFramesNativeStack) {
   }
   isolate->Dispose();
 }
+#endif  // V8_TARGET_ARCH_ARM
 
 TEST(ExternalCodeEventListener) {
   i::FLAG_log = false;
@@ -760,6 +762,7 @@ TEST(ExternalCodeEventListenerInnerFunctions) {
   isolate2->Dispose();
 }
 
+#ifndef V8_TARGET_ARCH_ARM
 TEST(ExternalCodeEventListenerWithInterpretedFramesNativeStack) {
   i::FLAG_log = false;
   i::FLAG_prof = false;
@@ -809,6 +812,7 @@ TEST(ExternalCodeEventListenerWithInterpretedFramesNativeStack) {
   }
   isolate->Dispose();
 }
+#endif  // V8_TARGET_ARCH_ARM
 
 TEST(TraceMaps) {
   SETUP_FLAGS();
@@ -65,11 +65,13 @@ bool IsInitiallyMutable(Factory* factory, Address object_address) {
 // The CHECK_EQ line is there just to ensure that the root is publicly
 // accessible from Heap, but ultimately the factory is used as it provides
 // handles that have the address in the root table.
-#define CHECK_NOT_IN_RO_SPACE(type, name, CamelName)                       \
-  Handle<Object> name = factory->name();                                   \
-  CHECK_EQ(*name, heap->name());                                           \
-  if (name->IsHeapObject() && IsInitiallyMutable(factory, name.address())) \
-    CHECK_NE(RO_SPACE, GetSpaceFromObject(HeapObject::cast(*name)));
+#define CHECK_NOT_IN_RO_SPACE(type, name, CamelName)                          \
+  Handle<Object> name = factory->name();                                      \
+  CHECK_EQ(*name, heap->name());                                              \
+  if (name->IsHeapObject() && IsInitiallyMutable(factory, name.address()) &&  \
+      !name->IsUndefined(CcTest::i_isolate())) {                              \
+    CHECK_NE(RO_SPACE, GetSpaceFromObject(HeapObject::cast(*name)));          \
+  }
 
 // The following tests check that all the roots accessible via public Heap
 // accessors are not in RO_SPACE with the exception of the objects listed in