Commit 3ca76de1 authored by Igor Sheludko, committed by V8 LUCI CQ

[ext-code-space] Use cage-friendly HeapObject::map() in compiler

This will allow making HeapObject::IsBlah() checks faster when external
code space is enabled.

Bug: v8:11880
Change-Id: I68473ba88063c555c90330c9748462adeb35aa0b
Reviewed-on: https://chromium-review.googlesource.com/c/v8/v8/+/3308797
Reviewed-by: Jakob Gruber <jgruber@chromium.org>
Commit-Queue: Igor Sheludko <ishell@chromium.org>
Cr-Commit-Position: refs/heads/main@{#78156}
parent 391f9c25
...@@ -365,20 +365,22 @@ base::Optional<ObjectRef> GetOwnFastDataPropertyFromHeap( ...@@ -365,20 +365,22 @@ base::Optional<ObjectRef> GetOwnFastDataPropertyFromHeap(
base::Optional<Object> constant; base::Optional<Object> constant;
{ {
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
PtrComprCageBase cage_base = broker->cage_base();
// This check to ensure the live map is the same as the cached map to // This check to ensure the live map is the same as the cached map to
// to protect us against reads outside the bounds of the heap. This could // to protect us against reads outside the bounds of the heap. This could
// happen if the Ref was created in a prior GC epoch, and the object // happen if the Ref was created in a prior GC epoch, and the object
// shrunk in size. It might end up at the edge of a heap boundary. If // shrunk in size. It might end up at the edge of a heap boundary. If
// we see that the map is the same in this GC epoch, we are safe. // we see that the map is the same in this GC epoch, we are safe.
Map map = holder.object()->map(kAcquireLoad); Map map = holder.object()->map(cage_base, kAcquireLoad);
if (*holder.map().object() != map) { if (*holder.map().object() != map) {
TRACE_BROKER_MISSING(broker, "Map changed for " << holder); TRACE_BROKER_MISSING(broker, "Map changed for " << holder);
return {}; return {};
} }
if (field_index.is_inobject()) { if (field_index.is_inobject()) {
constant = holder.object()->RawInobjectPropertyAt(map, field_index); constant =
holder.object()->RawInobjectPropertyAt(cage_base, map, field_index);
if (!constant.has_value()) { if (!constant.has_value()) {
TRACE_BROKER_MISSING( TRACE_BROKER_MISSING(
broker, "Constant field in " << holder << " is unsafe to read"); broker, "Constant field in " << holder << " is unsafe to read");
...@@ -386,12 +388,12 @@ base::Optional<ObjectRef> GetOwnFastDataPropertyFromHeap( ...@@ -386,12 +388,12 @@ base::Optional<ObjectRef> GetOwnFastDataPropertyFromHeap(
} }
} else { } else {
Object raw_properties_or_hash = Object raw_properties_or_hash =
holder.object()->raw_properties_or_hash(kRelaxedLoad); holder.object()->raw_properties_or_hash(cage_base, kRelaxedLoad);
// Ensure that the object is safe to inspect. // Ensure that the object is safe to inspect.
if (broker->ObjectMayBeUninitialized(raw_properties_or_hash)) { if (broker->ObjectMayBeUninitialized(raw_properties_or_hash)) {
return {}; return {};
} }
if (!raw_properties_or_hash.IsPropertyArray()) { if (!raw_properties_or_hash.IsPropertyArray(cage_base)) {
TRACE_BROKER_MISSING( TRACE_BROKER_MISSING(
broker, broker,
"Expected PropertyArray for backing store in " << holder << "."); "Expected PropertyArray for backing store in " << holder << ".");
...@@ -931,8 +933,8 @@ bool JSFunctionRef::IsConsistentWithHeapState() const { ...@@ -931,8 +933,8 @@ bool JSFunctionRef::IsConsistentWithHeapState() const {
HeapObjectData::HeapObjectData(JSHeapBroker* broker, ObjectData** storage, HeapObjectData::HeapObjectData(JSHeapBroker* broker, ObjectData** storage,
Handle<HeapObject> object, ObjectDataKind kind) Handle<HeapObject> object, ObjectDataKind kind)
: ObjectData(broker, storage, object, kind), : ObjectData(broker, storage, object, kind),
map_(broker->GetOrCreateData(object->map(kAcquireLoad), map_(broker->GetOrCreateData(
kAssumeMemoryFence)) { object->map(broker->cage_base(), kAcquireLoad), kAssumeMemoryFence)) {
CHECK_IMPLIES(broker->mode() == JSHeapBroker::kSerialized, CHECK_IMPLIES(broker->mode() == JSHeapBroker::kSerialized,
kind == kBackgroundSerializedHeapObject); kind == kBackgroundSerializedHeapObject);
} }
...@@ -1678,7 +1680,8 @@ base::Optional<ObjectRef> JSObjectRef::RawInobjectPropertyAt( ...@@ -1678,7 +1680,8 @@ base::Optional<ObjectRef> JSObjectRef::RawInobjectPropertyAt(
Handle<Object> value; Handle<Object> value;
{ {
DisallowGarbageCollection no_gc; DisallowGarbageCollection no_gc;
Map current_map = object()->map(kAcquireLoad); PtrComprCageBase cage_base = broker()->cage_base();
Map current_map = object()->map(cage_base, kAcquireLoad);
// If the map changed in some prior GC epoch, our {index} could be // If the map changed in some prior GC epoch, our {index} could be
// outside the valid bounds of the cached map. // outside the valid bounds of the cached map.
...@@ -1688,7 +1691,7 @@ base::Optional<ObjectRef> JSObjectRef::RawInobjectPropertyAt( ...@@ -1688,7 +1691,7 @@ base::Optional<ObjectRef> JSObjectRef::RawInobjectPropertyAt(
} }
base::Optional<Object> maybe_value = base::Optional<Object> maybe_value =
object()->RawInobjectPropertyAt(current_map, index); object()->RawInobjectPropertyAt(cage_base, current_map, index);
if (!maybe_value.has_value()) { if (!maybe_value.has_value()) {
TRACE_BROKER_MISSING(broker(), TRACE_BROKER_MISSING(broker(),
"Unable to safely read property in " << *this); "Unable to safely read property in " << *this);
...@@ -2527,7 +2530,9 @@ base::Optional<ObjectRef> SourceTextModuleRef::import_meta() const { ...@@ -2527,7 +2530,9 @@ base::Optional<ObjectRef> SourceTextModuleRef::import_meta() const {
} }
base::Optional<MapRef> HeapObjectRef::map_direct_read() const { base::Optional<MapRef> HeapObjectRef::map_direct_read() const {
return TryMakeRef(broker(), object()->map(kAcquireLoad), kAssumeMemoryFence); PtrComprCageBase cage_base = broker()->cage_base();
return TryMakeRef(broker(), object()->map(cage_base, kAcquireLoad),
kAssumeMemoryFence);
} }
namespace { namespace {
...@@ -2562,7 +2567,7 @@ OddballType GetOddballType(Isolate* isolate, Map map) { ...@@ -2562,7 +2567,7 @@ OddballType GetOddballType(Isolate* isolate, Map map) {
HeapObjectType HeapObjectRef::GetHeapObjectType() const { HeapObjectType HeapObjectRef::GetHeapObjectType() const {
if (data_->should_access_heap()) { if (data_->should_access_heap()) {
Map map = Handle<HeapObject>::cast(object())->map(); Map map = Handle<HeapObject>::cast(object())->map(broker()->cage_base());
HeapObjectType::Flags flags(0); HeapObjectType::Flags flags(0);
if (map.is_undetectable()) flags |= HeapObjectType::kUndetectable; if (map.is_undetectable()) flags |= HeapObjectType::kUndetectable;
if (map.is_callable()) flags |= HeapObjectType::kCallable; if (map.is_callable()) flags |= HeapObjectType::kCallable;
......
...@@ -43,6 +43,9 @@ JSHeapBroker::JSHeapBroker(Isolate* isolate, Zone* broker_zone, ...@@ -43,6 +43,9 @@ JSHeapBroker::JSHeapBroker(Isolate* isolate, Zone* broker_zone,
bool tracing_enabled, bool is_concurrent_inlining, bool tracing_enabled, bool is_concurrent_inlining,
CodeKind code_kind) CodeKind code_kind)
: isolate_(isolate), : isolate_(isolate),
#if V8_COMPRESS_POINTERS
cage_base_(isolate),
#endif // V8_COMPRESS_POINTERS
zone_(broker_zone), zone_(broker_zone),
refs_(zone()->New<RefsMap>(kMinimalRefsBucketCount, AddressMatcher(), refs_(zone()->New<RefsMap>(kMinimalRefsBucketCount, AddressMatcher(),
zone())), zone())),
......
...@@ -114,6 +114,17 @@ class V8_EXPORT_PRIVATE JSHeapBroker { ...@@ -114,6 +114,17 @@ class V8_EXPORT_PRIVATE JSHeapBroker {
void InitializeAndStartSerializing(); void InitializeAndStartSerializing();
Isolate* isolate() const { return isolate_; } Isolate* isolate() const { return isolate_; }
// The pointer compression cage base value used for decompression of all
// tagged values except references to Code objects.
PtrComprCageBase cage_base() const {
#if V8_COMPRESS_POINTERS
return cage_base_;
#else
return PtrComprCageBase{};
#endif // V8_COMPRESS_POINTERS
}
Zone* zone() const { return zone_; } Zone* zone() const { return zone_; }
bool tracing_enabled() const { return tracing_enabled_; } bool tracing_enabled() const { return tracing_enabled_; }
bool is_concurrent_inlining() const { return is_concurrent_inlining_; } bool is_concurrent_inlining() const { return is_concurrent_inlining_; }
...@@ -413,6 +424,9 @@ class V8_EXPORT_PRIVATE JSHeapBroker { ...@@ -413,6 +424,9 @@ class V8_EXPORT_PRIVATE JSHeapBroker {
std::unique_ptr<CanonicalHandlesMap> canonical_handles); std::unique_ptr<CanonicalHandlesMap> canonical_handles);
Isolate* const isolate_; Isolate* const isolate_;
#if V8_COMPRESS_POINTERS
const PtrComprCageBase cage_base_;
#endif // V8_COMPRESS_POINTERS
Zone* const zone_; Zone* const zone_;
base::Optional<NativeContextRef> target_native_context_; base::Optional<NativeContextRef> target_native_context_;
RefsMap* refs_; RefsMap* refs_;
......
...@@ -121,8 +121,14 @@ Address AbstractCode::InstructionEnd() { ...@@ -121,8 +121,14 @@ Address AbstractCode::InstructionEnd() {
} }
} }
bool AbstractCode::contains(Address inner_pointer) { bool AbstractCode::contains(Isolate* isolate, Address inner_pointer) {
return (address() <= inner_pointer) && (inner_pointer <= address() + Size()); PtrComprCageBase cage_base(isolate);
if (IsCode(cage_base)) {
return GetCode().contains(isolate, inner_pointer);
} else {
return (address() <= inner_pointer) &&
(inner_pointer <= address() + Size(cage_base));
}
} }
CodeKind AbstractCode::kind() { CodeKind AbstractCode::kind() {
...@@ -435,7 +441,8 @@ bool Code::contains(Isolate* isolate, Address inner_pointer) { ...@@ -435,7 +441,8 @@ bool Code::contains(Isolate* isolate, Address inner_pointer) {
return true; return true;
} }
} }
return (address() <= inner_pointer) && (inner_pointer < address() + Size()); return (address() <= inner_pointer) &&
(inner_pointer < address() + CodeSize());
} }
// static // static
......
...@@ -748,7 +748,7 @@ class AbstractCode : public HeapObject { ...@@ -748,7 +748,7 @@ class AbstractCode : public HeapObject {
inline int SizeIncludingMetadata(); inline int SizeIncludingMetadata();
// Returns true if pc is inside this object's instructions. // Returns true if pc is inside this object's instructions.
inline bool contains(Address pc); inline bool contains(Isolate* isolate, Address pc);
// Returns the kind of the code. // Returns the kind of the code.
inline CodeKind kind(); inline CodeKind kind();
......
...@@ -341,9 +341,8 @@ Object JSObject::RawFastPropertyAt(PtrComprCageBase cage_base, ...@@ -341,9 +341,8 @@ Object JSObject::RawFastPropertyAt(PtrComprCageBase cage_base,
} }
} }
base::Optional<Object> JSObject::RawInobjectPropertyAt(Map original_map, base::Optional<Object> JSObject::RawInobjectPropertyAt(
FieldIndex index) const { PtrComprCageBase cage_base, Map original_map, FieldIndex index) const {
PtrComprCageBase cage_base = GetPtrComprCageBase(*this);
CHECK(index.is_inobject()); CHECK(index.is_inobject());
// This method implements a "snapshot" protocol to protect against reading out // This method implements a "snapshot" protocol to protect against reading out
...@@ -373,7 +372,7 @@ base::Optional<Object> JSObject::RawInobjectPropertyAt(Map original_map, ...@@ -373,7 +372,7 @@ base::Optional<Object> JSObject::RawInobjectPropertyAt(Map original_map,
// given by the map and it will be a valid Smi or object pointer. // given by the map and it will be a valid Smi or object pointer.
Object maybe_tagged_object = Object maybe_tagged_object =
TaggedField<Object>::Acquire_Load(cage_base, *this, index.offset()); TaggedField<Object>::Acquire_Load(cage_base, *this, index.offset());
if (original_map != map(kAcquireLoad)) return {}; if (original_map != map(cage_base, kAcquireLoad)) return {};
return maybe_tagged_object; return maybe_tagged_object;
} }
......
...@@ -665,8 +665,8 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> { ...@@ -665,8 +665,8 @@ class JSObject : public TorqueGeneratedJSObject<JSObject, JSReceiver> {
// in which this method is meant to be used, and what guarantees it // in which this method is meant to be used, and what guarantees it
// provides against invalid reads from another thread during object // provides against invalid reads from another thread during object
// mutation. // mutation.
inline base::Optional<Object> RawInobjectPropertyAt(Map original_map, inline base::Optional<Object> RawInobjectPropertyAt(
FieldIndex index) const; PtrComprCageBase cage_base, Map original_map, FieldIndex index) const;
inline void FastPropertyAtPut(FieldIndex index, Object value, inline void FastPropertyAtPut(FieldIndex index, Object value,
WriteBarrierMode mode = UPDATE_WRITE_BARRIER); WriteBarrierMode mode = UPDATE_WRITE_BARRIER);
......
...@@ -49,7 +49,7 @@ namespace internal { ...@@ -49,7 +49,7 @@ namespace internal {
static bool IsAddressWithinFuncCode(JSFunction function, Isolate* isolate, static bool IsAddressWithinFuncCode(JSFunction function, Isolate* isolate,
void* addr) { void* addr) {
i::AbstractCode code = function.abstract_code(isolate); i::AbstractCode code = function.abstract_code(isolate);
return code.contains(reinterpret_cast<Address>(addr)); return code.contains(isolate, reinterpret_cast<Address>(addr));
} }
static bool IsAddressWithinFuncCode(v8::Local<v8::Context> context, static bool IsAddressWithinFuncCode(v8::Local<v8::Context> context,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment